diff --git a/Custom_EasyBlocks/README.md b/Custom_EasyBlocks/README.md
index 1bf702ef5f00b7819825c8b85d03fa017e74a254..80c71f5e7cd592ddf8b1b65241d4dff15b16de67 100644
--- a/Custom_EasyBlocks/README.md
+++ b/Custom_EasyBlocks/README.md
@@ -2,9 +2,34 @@
 
 Overview of the custom EasyBlocks.
 
-## Hypre
-
-- __*added by*__ s.achilles
-- __*needed because*__ optional CUDA support is required.
-- __*difference compared to upstream*__ option `withcuda` to make CUDA support optional even when CUDA is loaded (which is the default in our MPI toolchains).
-- __*can be removed*__
+## MPICH
+
+- __*added by*__ d.alvarez
+- __*needed because*__ an optional `check_static_libs` parameter in the sanity check is needed (used by `psmpi.py`)
+- __*difference compared to upstream*__ the aforementioned parameter
+- __*can not be removed*__ at least until that option (needed just for `psmpi.py`) is merged upstream
+
+## PSMPI
+
+- __*added by*__ d.alvarez
+- __*needed because*__ PGO options are needed to build on the JURECA booster, and CUDA support is required
+- __*difference compared to upstream*__ all the supporting code to enable the two features mentioned above
+- __*can not be removed*__ at least until the PGO options are dropped (when the JURECA booster is decommissioned) and the CUDA options are pushed upstream
+
+## CODE_SATURNE
+
+- __*added by*__ m.cakircali
+- __*needed because*__ there is no support to install `code_saturne` upstream
+- __*can not be removed*__ at least until the easyblock is added upstream
+
+## NVIDIA_DRIVER
+
+- __*added by*__ d.alvarez
+- __*needed because*__ we custom-install the NVIDIA driver libraries in the EB stack
+- __*can not be removed*__ at least until the easyblock is added upstream
+
+## generic/SYSTEM_BUNDLE
+
+- __*added by*__ d.alvarez
+- __*needed because*__ it provides the basic support for the MPI settings modules
+- __*can not be removed*__
diff --git a/Custom_EasyBlocks/bazel.py b/Custom_EasyBlocks/bazel.py
deleted file mode 100644
index 6f6b19fbcf549ed2d12d023687baeff71ed8b093..0000000000000000000000000000000000000000
--- a/Custom_EasyBlocks/bazel.py
+++ /dev/null
@@ -1,219 +0,0 @@
-##
-# Copyright 2009-2021 Ghent University
-#
-# This file is part of EasyBuild,
-# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
-# with support of Ghent University (http://ugent.be/hpc),
-# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
-# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
-# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
-#
-# https://github.com/easybuilders/easybuild
-#
-# EasyBuild is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation v2.
-#
-# EasyBuild is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
-## -""" -EasyBuild support for building and installing Bazel, implemented as an easyblock -""" -from distutils.version import LooseVersion -import glob -import os -import tempfile - -import easybuild.tools.environment as env -from easybuild.framework.easyblock import EasyBlock -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import apply_regex_substitutions, copy_file, which -from easybuild.tools.modules import get_software_root, get_software_version -from easybuild.tools.run import run_cmd -from easybuild.framework.easyconfig import CUSTOM - - -class EB_Bazel(EasyBlock): - """Support for building/installing Bazel.""" - - @staticmethod - def extra_options(): - """Extra easyconfig parameters specific to EB_Bazel.""" - extra_vars = { - 'static': [None, 'Build statically linked executables ' + - '(default: True for Bazel >= 1.0 else False)', CUSTOM], - } - return EasyBlock.extra_options(extra_vars) - - def fixup_hardcoded_paths(self): - """Patch out hard coded paths to compiler and binutils tools""" - binutils_root = get_software_root('binutils') - gcc_root = get_software_root('GCCcore') or get_software_root('GCC') - gcc_ver = get_software_version( - 'GCCcore') or get_software_version('GCC') - - # only patch Bazel scripts if binutils & GCC installation prefix could be determined - if not binutils_root or not gcc_root: - self.log.info( - "Not patching Bazel build scripts, installation prefix for binutils/GCC not found") - return - - # replace hardcoded paths in (unix_)cc_configure.bzl - # hard-coded paths in (unix_)cc_configure.bzl were removed in 0.19.0 - if LooseVersion(self.version) < LooseVersion('0.19.0'): - regex_subs = [ - (r'-B/usr/bin', '-B%s' % os.path.join(binutils_root, 'bin')), - (r'"/usr/bin', '"' + os.path.join(binutils_root, 'bin')), - ] - for conf_bzl in ['cc_configure.bzl', 'unix_cc_configure.bzl']: - filepath = os.path.join('tools', 'cpp', conf_bzl) - if os.path.exists(filepath): - apply_regex_substitutions(filepath, regex_subs) - - # replace hardcoded paths in CROSSTOOL - # CROSSTOOL script is no longer there in Bazel 0.24.0 - if LooseVersion(self.version) < LooseVersion('0.24.0'): - res = glob.glob(os.path.join(gcc_root, 'lib', - 'gcc', '*', gcc_ver, 'include')) - if res and len(res) == 1: - gcc_lib_inc = res[0] - else: - raise EasyBuildError( - "Failed to pinpoint location of GCC include files: %s", res) - - gcc_lib_inc_bis = os.path.join( - os.path.dirname(gcc_lib_inc), 'include-fixed') - if not os.path.exists(gcc_lib_inc_bis): - self.log.info( - "Derived directory %s does not exist, falling back to %s", gcc_lib_inc_bis, gcc_lib_inc) - gcc_lib_inc_bis = gcc_lib_inc - - gcc_cplusplus_inc = os.path.join( - gcc_root, 'include', 'c++', gcc_ver) - if not os.path.exists(gcc_cplusplus_inc): - raise EasyBuildError( - "Derived directory %s does not exist", gcc_cplusplus_inc) - - regex_subs = [ - (r'-B/usr/bin', '-B%s' % os.path.join(binutils_root, 'bin')), - (r'(cxx_builtin_include_directory:.*)/usr/lib/gcc', - r'\1%s' % gcc_lib_inc), - (r'(cxx_builtin_include_directory:.*)/usr/local/include', - r'\1%s' % gcc_lib_inc_bis), - (r'(cxx_builtin_include_directory:.*)/usr/include', - r'\1%s' % gcc_cplusplus_inc), - ] - for tool in ['ar', 'cpp', 'dwp', 'gcc', 'ld']: - path = which(tool) - if path: - regex_subs.append( - (os.path.join('/usr', 'bin', tool), path)) - else: - raise EasyBuildError( - "Failed to determine path to '%s'", tool) - - apply_regex_substitutions(os.path.join( - 'tools', 'cpp', 'CROSSTOOL'), regex_subs) - - def prepare_step(self, 
*args, **kwargs): - """Setup bazel output root""" - super(EB_Bazel, self).prepare_step(*args, **kwargs) - self.bazel_tmp_dir = tempfile.mkdtemp( - suffix='-bazel-tmp', dir=self.builddir) - self.output_user_root = tempfile.mkdtemp( - suffix='-bazel-root', dir=self.builddir) - - def extract_step(self): - """Extract Bazel sources.""" - # Older Bazel won't build when the output_user_root is a subfolder of the source folder - # So create a dedicated source folder - self.cfg.update('unpack_options', '-d src') - super(EB_Bazel, self).extract_step() - - def configure_step(self): - """Custom configuration procedure for Bazel.""" - - # Last instance of hardcoded paths was removed in 0.24.0 - if LooseVersion(self.version) < LooseVersion('0.24.0'): - self.fixup_hardcoded_paths() - - # Keep temporary directory in case of error. EB will clean it up on success - apply_regex_substitutions(os.path.join('scripts', 'bootstrap', 'buildenv.sh'), [ - (r'atexit cleanup_tempdir_.*', '') - ]) - - # enable building in parallel - bazel_args = '--jobs=%d' % self.cfg['parallel'] - - # Bazel provides a JDK by itself for some architectures - # We want to enforce it using the JDK we provided via modules - # This is required for Power where Bazel does not have a JDK, but requires it for building itself - # See https://github.com/bazelbuild/bazel/issues/10377 - bazel_args += ' --host_javabase=@local_jdk//:jdk' - - # Link C++ libs statically, see https://github.com/bazelbuild/bazel/issues/4137 - static = self.cfg['static'] - if static is None: - # Works for Bazel 1.x and higher - static = LooseVersion(self.version) >= LooseVersion('1.0.0') - if static: - env.setvar('BAZEL_LINKOPTS', '-static-libstdc++:-static-libgcc') - env.setvar('BAZEL_LINKLIBS', '-l%:libstdc++.a') - - env.setvar('EXTRA_BAZEL_ARGS', bazel_args) - env.setvar('EMBED_LABEL', self.version) - env.setvar('VERBOSE', 'yes') - - def build_step(self): - """Custom build procedure for Bazel.""" - cmd = ' '.join([ - # The initial bootstrap of bazel is done in TMPDIR - "export TMPDIR='%s' &&" % self.bazel_tmp_dir, - self.cfg['prebuildopts'], - # Show the commands the script is running to faster debug failures - "bash -c 'set -x && ./compile.sh'", - ]) - run_cmd(cmd, log_all=True, simple=True, log_ok=True) - - def test_step(self): - """Test the compilation""" - - runtest = self.cfg['runtest'] - if runtest: - # This could be used to pass options to Bazel: runtest = '--bazel-opt=foo test' - if runtest is True: - runtest = 'test' - cmd = " ".join([ - self.cfg['pretestopts'], - os.path.join('output', 'bazel'), - # Avoid bazel using $HOME - '--output_user_root=%s' % self.output_user_root, - runtest, - '--jobs=%d' % self.cfg['parallel'], - '--host_javabase=@local_jdk//:jdk', - # Be more verbose - '--subcommands', '--verbose_failures', - # Just build tests - '--build_tests_only', - self.cfg['testopts'] - ]) - run_cmd(cmd, log_all=True, simple=True) - - def install_step(self): - """Custom install procedure for Bazel.""" - copy_file(os.path.join('output', 'bazel'), - os.path.join(self.installdir, 'bin', 'bazel')) - - def sanity_check_step(self): - """Custom sanity check for Bazel.""" - custom_paths = { - 'files': ['bin/bazel'], - 'dirs': [], - } - super(EB_Bazel, self).sanity_check_step(custom_paths=custom_paths) diff --git a/Custom_EasyBlocks/cp2k.py b/Custom_EasyBlocks/cp2k.py deleted file mode 100644 index 2e1144254a298056db829f0c74ac92d26ac93627..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/cp2k.py +++ /dev/null @@ -1,888 +0,0 @@ -## -# Copyright 
2009-2019 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for building and installing CP2K, implemented as an easyblock - -@author: Stijn De Weirdt (Ghent University) -@author: Dries Verdegem (Ghent University) -@author: Kenneth Hoste (Ghent University) -@author: Pieter De Baets (Ghent University) -@author: Jens Timmerman (Ghent University) -@author: Ward Poelmans (Ghent University) -@author: Luca Marsella (CSCS) -@author: Damian Alvarez (Forschungszentrum Juelich GmbH) -@author: Alan O'Cais (Forschungszentrum Juelich GmbH) -@author: Balazs Hajgato (Free University Brussels (VUB)) -""" - -import fileinput -import glob -import re -import os -import sys -from distutils.version import LooseVersion - -import easybuild.tools.toolchain as toolchain -from easybuild.framework.easyblock import EasyBlock -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.environment import setvar -from easybuild.tools.filetools import change_dir, copy_dir, copy_file, mkdir, write_file -from easybuild.tools.config import build_option -from easybuild.tools.modules import get_software_root, get_software_version -from easybuild.tools.run import run_cmd -from easybuild.tools.systemtools import get_avail_core_count - - -class EB_CP2K(EasyBlock): - """ - Support for building CP2K - - prepare module include files if required - - generate custom config file in 'arch' directory - - build CP2K - - run regression test if desired - - install by copying binary executables - """ - - def __init__(self, *args, **kwargs): - super(EB_CP2K, self).__init__(*args, **kwargs) - - self.typearch = None - - # this should be set to False for old versions of GCC (e.g. 
v4.1) - self.compilerISO_C_BINDING = True - - # compiler options that need to be set in Makefile - self.debug = '' - self.fpic = '' - - # used for both libsmm and libxsmm - self.libsmm = '' - self.modincpath = '' - self.openmp = '' - - self.make_instructions = '' - - @staticmethod - def extra_options(): - extra_vars = { - 'extracflags': ['', "Extra CFLAGS to be added", CUSTOM], - 'extradflags': ['', "Extra DFLAGS to be added", CUSTOM], - 'ignore_regtest_fails': [False, ("Ignore failures in regression test " - "(should be used with care)"), CUSTOM], - 'library': [False, "Also build CP2K as a library", CUSTOM], - 'maxtasks': [4, ("Maximum number of CP2K instances run at " - "the same time during testing"), CUSTOM], - 'modinc': [[], ("List of modinc's to use (*.f90], or 'True' to use " - "all found at given prefix"), CUSTOM], - 'modincprefix': ['', "Intel MKL prefix for modinc include dir", CUSTOM], - 'runtest': [True, "Build and run CP2K tests", CUSTOM], - 'omp_num_threads': [None, "Value to set $OMP_NUM_THREADS to during testing", CUSTOM], - 'plumed': [None, "Enable PLUMED support", CUSTOM], - 'cuda': [True, "Enable CUDA support", CUSTOM], - 'type': ['popt', "Type of build ('popt' or 'psmp')", CUSTOM], - 'typeopt': [True, "Enable optimization", CUSTOM], - } - return EasyBlock.extra_options(extra_vars) - - def _generate_makefile(self, options): - """Generate Makefile based on options dictionary and optional make instructions""" - - text = "# Makefile generated by CP2K easyblock in EasyBuild\n" - for key, value in sorted(options.items()): - text += "%s = %s\n" % (key, value) - return text + self.make_instructions - - def configure_step(self): - """Configure build - - build Libint wrapper - - generate Makefile - """ - - known_types = ['popt', 'psmp'] - if self.cfg['type'] not in known_types: - raise EasyBuildError("Unknown build type specified: '%s', known types are %s", - self.cfg['type'], known_types) - self.log.info("initial start_dir %s" %self.cfg['start_dir']) - cpstring="cp -r %s../dbcsr-2.0.1/* %sexts/dbcsr/" %(self.cfg['start_dir'],self.cfg['start_dir']) -# run_cmd(cpstring) - self.log.info(cpstring) - os.system(cpstring) - - # correct start dir, if needed - # recent CP2K versions have a 'cp2k' dir in the unpacked 'cp2k' dir - cp2k_path = os.path.join(self.cfg['start_dir'], 'cp2k') - if os.path.exists(cp2k_path): - self.cfg['start_dir'] = cp2k_path - self.log.info("Corrected start_dir to %s" % self.cfg['start_dir']) - - # set compilers options according to toolchain config - # full debug: -g -traceback -check all -fp-stack-check - # -g links to mpi debug libs - if self.toolchain.options['debug']: - self.debug = '-g' - self.log.info("Debug build") - if self.toolchain.options['pic']: - self.fpic = "-fPIC" - self.log.info("Using fPIC") - - # report on extra flags being used - if self.cfg['extracflags']: - self.log.info("Using extra CFLAGS: %s" % self.cfg['extracflags']) - if self.cfg['extradflags']: - self.log.info("Using extra DFLAGS: %s" % self.cfg['extradflags']) - - # lib(x)smm support - libsmm = get_software_root('libsmm') - libxsmm = get_software_root('libxsmm') - if libxsmm: - self.cfg.update('extradflags', '-D__LIBXSMM') - self.libsmm = '-lxsmm -lxsmmf' - self.log.debug('Using libxsmm %s' % libxsmm) - elif libsmm: - libsmms = glob.glob(os.path.join(libsmm, 'lib', 'libsmm_*nn.a')) - dfs = [os.path.basename(os.path.splitext(x)[0]).replace('lib', '-D__HAS_') for x in libsmms] - moredflags = ' ' + ' '.join(dfs) - self.cfg.update('extradflags', moredflags) - self.libsmm = ' 
'.join(libsmms) - self.log.debug('Using libsmm %s (extradflags %s)' % (self.libsmm, moredflags)) - - # obtain list of modinc's to use - if self.cfg["modinc"]: - self.modincpath = self.prepmodinc() - - # set typearch - self.typearch = "Linux-x86-64-%s" % self.toolchain.name - - # extra make instructions - self.make_instructions = '' # "graphcon.o: graphcon.F\n\t$(FC) -c $(FCFLAGS2) $<\n" - - # compiler toolchain specific configuration - comp_fam = self.toolchain.comp_family() - if comp_fam == toolchain.INTELCOMP: - options = self.configure_intel_based() - elif comp_fam == toolchain.GCC: - options = self.configure_GCC_based() - else: - raise EasyBuildError("Don't know how to tweak configuration for compiler family %s" % comp_fam) - - # BLAS/FFTW - if get_software_root('imkl'): - options = self.configure_MKL(options) - else: - # BLAS - if get_software_root('ACML'): - options = self.configure_ACML(options) - else: - options = self.configure_BLAS_lib(options) - - # FFTW (no MKL involved) - if 'fftw3' in os.getenv('LIBFFT', ''): - options = self.configure_FFTW3(options) - - # LAPACK - if os.getenv('LIBLAPACK_MT', None) is not None: - options = self.configure_LAPACK(options) - - if os.getenv('LIBSCALAPACK', None) is not None: - options = self.configure_ScaLAPACK(options) - - # PLUMED - plumed = get_software_root('PLUMED') - if self.cfg['plumed'] and not plumed: - raise EasyBuildError("The PLUMED module needs to be loaded to build CP2K with PLUMED support") - - # enable PLUMED support if PLUMED is listed as a dependency - # and PLUMED support is either explicitly enabled (plumed = True) or unspecified ('plumed' not defined) - if plumed and (self.cfg['plumed'] or self.cfg['plumed'] is None): - options['LIBS'] += ' -lplumed' - options['DFLAGS'] += ' -D__PLUMED2' - - # ELPA - elpa = get_software_root('ELPA') - if elpa: - options['LIBS'] += ' -lelpa' - elpa_inc_dir = os.path.join(elpa, 'include', 'elpa-%s' % get_software_version('ELPA'), 'modules') - options['FCFLAGSOPT'] += ' -I%s ' % elpa_inc_dir - if LooseVersion(self.version) >= LooseVersion('6.1'): - elpa_ver = ''.join(get_software_version('ELPA').split('.')[:2]) - options['DFLAGS'] += ' -D__ELPA=%s' % elpa_ver - elpa_inc_dir = os.path.join(elpa, 'include', 'elpa-%s' % get_software_version('ELPA'), 'elpa') - options['FCFLAGSOPT'] += ' -I%s ' % elpa_inc_dir - else: - options['DFLAGS'] += ' -D__ELPA3' - - # CUDA - if self.cfg['cuda']: - cuda = get_software_root('CUDA') - if cuda: - options['DFLAGS'] += ' -D__ACC -D__DBCSR_ACC' - options['LIBS'] += ' -lcudart -lcublas -lcufft -lrt' - options['NVCC'] = ' nvcc' - - # avoid group nesting - options['LIBS'] = options['LIBS'].replace('-Wl,--start-group', '').replace('-Wl,--end-group', '') - - options['LIBS'] = "-Wl,--start-group %s -Wl,--end-group" % options['LIBS'] - - # specify correct location for 'data' directory in final installation - options['DATA_DIR'] = os.path.join(self.installdir, 'data') - - # create arch file using options set - archfile = os.path.join(self.cfg['start_dir'], 'arch', '%s.%s' % (self.typearch, self.cfg['type'])) - txt = self._generate_makefile(options) - write_file(archfile, txt) - self.log.info("Content of makefile (%s):\n%s" % (archfile, txt)) - - def prepmodinc(self): - """Prepare list of module files""" - - self.log.debug("Preparing module files") - - imkl = get_software_root('imkl') - - if imkl: - - # prepare modinc target path - modincpath = os.path.join(os.path.dirname(os.path.normpath(self.cfg['start_dir'])), 'modinc') - self.log.debug("Preparing module files in %s" % 
modincpath) - - mkdir(modincpath, parents=True) - - # get list of modinc source files - modincdir = os.path.join(imkl, self.cfg["modincprefix"], 'include') - - if isinstance(self.cfg["modinc"], list): - modfiles = [os.path.join(modincdir, x) for x in self.cfg["modinc"]] - - elif isinstance(self.cfg["modinc"], bool) and self.cfg["modinc"]: - modfiles = glob.glob(os.path.join(modincdir, '*.f90')) - - else: - raise EasyBuildError("prepmodinc: Please specify either a boolean value or a list of files in modinc " - "(found: %s).", self.cfg["modinc"]) - - f77 = os.getenv('F77') - if not f77: - raise EasyBuildError("F77 environment variable not set, can't continue.") - - # create modinc files - for f in modfiles: - if f77.endswith('ifort'): - cmd = "%s -module %s -c %s" % (f77, modincpath, f) - elif f77 in ['gfortran', 'mpif77']: - cmd = "%s -J%s -c %s" % (f77, modincpath, f) - else: - raise EasyBuildError("prepmodinc: Unknown value specified for F77 (%s)", f77) - - run_cmd(cmd, log_all=True, simple=True) - - return modincpath - else: - raise EasyBuildError("Don't know how to prepare modinc, imkl not found") - - def configure_common(self): - """Common configuration for all toolchains""" - - # openmp introduces 2 major differences - # -automatic is default: -noautomatic -auto-scalar - # some mem-bandwidth optimisation - if self.cfg['type'] == 'psmp': - self.openmp = self.toolchain.get_flag('openmp') - - # determine which opt flags to use - if self.cfg['typeopt']: - optflags = 'OPT' - regflags = 'OPT2' - else: - optflags = 'NOOPT' - regflags = 'NOOPT' - - # make sure a MPI-2 able MPI lib is used - mpi2 = False - if hasattr(self.toolchain, 'MPI_FAMILY') and self.toolchain.MPI_FAMILY is not None: - known_mpi2_fams = [toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2, toolchain.OPENMPI, - toolchain.INTELMPI] - mpi_fam = self.toolchain.mpi_family() - if mpi_fam in known_mpi2_fams: - mpi2 = True - self.log.debug("Determined MPI2 compatibility based on MPI toolchain component: %s" % mpi_fam) - else: - self.log.debug("Cannot determine MPI2 compatibility based on MPI toolchain component: %s" % mpi_fam) - else: - # can't use toolchain.mpi_family, because of system toolchain - mpi2libs = ['impi', 'MVAPICH2', 'OpenMPI', 'MPICH2', 'MPICH'] - for mpi2lib in mpi2libs: - if get_software_root(mpi2lib): - mpi2 = True - self.log.debug("Determined MPI2 compatibility based on loaded MPI module: %s") - else: - self.log.debug("MPI-2 supporting MPI library %s not loaded.") - - if not mpi2: - raise EasyBuildError("CP2K needs MPI-2, no known MPI-2 supporting library loaded?") - - cppflags = os.getenv('CPPFLAGS') - ldflags = os.getenv('LDFLAGS') - cflags = os.getenv('CFLAGS') - fflags = os.getenv('FFLAGS') - fflags_lowopt = re.sub('-O[0-9]', '-O1', fflags) - options = { - 'CC': os.getenv('MPICC'), - 'CPP': '', - 'FC': '%s %s' % (os.getenv('MPIF90'), self.openmp), - 'LD': '%s %s' % (os.getenv('MPIF90'), self.openmp), - 'AR': 'ar -r', - 'CPPFLAGS': '', - - 'FPIC': self.fpic, - 'DEBUG': self.debug, - - 'FCFLAGS': '$(FCFLAGS%s)' % optflags, - 'FCFLAGS2': '$(FCFLAGS%s)' % regflags, - - 'CFLAGS': ' %s %s %s $(FPIC) $(DEBUG) %s ' % (cflags, cppflags, ldflags, self.cfg['extracflags']), - 'DFLAGS': ' -D__parallel -D__BLACS -D__SCALAPACK -D__FFTSG %s' % self.cfg['extradflags'], - - 'LIBS': os.getenv('LIBS', ''), - - 'FCFLAGSNOOPT': '$(DFLAGS) $(CFLAGS) -O0 $(FREE) $(FPIC) $(DEBUG)', - 'FCFLAGSOPT': '%s $(FREE) $(SAFE) $(FPIC) $(DEBUG)' % fflags, - 'FCFLAGSOPT2': '%s $(FREE) $(SAFE) $(FPIC) $(DEBUG)' % fflags_lowopt, - } - - libint 
= get_software_root('LibInt') - if libint: - options['DFLAGS'] += ' -D__LIBINT' - - libintcompiler = "%s %s" % (os.getenv('CC'), os.getenv('CFLAGS')) - - # Build libint-wrapper, if required - libint_wrapper = '' - - # required for old versions of GCC - if not self.compilerISO_C_BINDING: - options['DFLAGS'] += ' -D__HAS_NO_ISO_C_BINDING' - - # determine path for libint_tools dir - libinttools_paths = ['libint_tools', 'tools/hfx_tools/libint_tools'] - libinttools_path = None - for path in libinttools_paths: - path = os.path.join(self.cfg['start_dir'], path) - if os.path.isdir(path): - libinttools_path = path - change_dir(libinttools_path) - if not libinttools_path: - raise EasyBuildError("No libinttools dir found") - - # build libint wrapper - cmd = "%s -c libint_cpp_wrapper.cpp -I%s/include" % (libintcompiler, libint) - if not run_cmd(cmd, log_all=True, simple=True): - raise EasyBuildError("Building the libint wrapper failed") - libint_wrapper = '%s/libint_cpp_wrapper.o' % libinttools_path - - # determine Libint libraries based on major version number - libint_maj_ver = get_software_version('Libint').split('.')[0] - if libint_maj_ver == '1': - libint_libs = "$(LIBINTLIB)/libderiv.a $(LIBINTLIB)/libint.a $(LIBINTLIB)/libr12.a" - elif libint_maj_ver == '2': - # libint_libs = "$(LIBINTLIB)/libint2.a" - libint_libs = "-L$(LIBINTLIB) -lint2" - else: - raise EasyBuildError("Don't know how to handle libint version %s", libint_maj_ver) - self.log.info("Using Libint version %s" % (libint_maj_ver)) - - options['LIBINTLIB'] = '%s/lib' % libint - options['LIBS'] += ' %s -lstdc++ %s' % (libint_libs, libint_wrapper) - - # add Libint include dir to $FCFLAGS - options['FCFLAGS'] += ' -I' + os.path.join(libint, 'include') - - else: - # throw a warning, since CP2K without Libint doesn't make much sense - self.log.warning("Libint module not loaded, so building without Libint support") - - libxc = get_software_root('libxc') - if libxc: - cur_libxc_version = get_software_version('libxc') - if LooseVersion(self.version) >= LooseVersion('6.1'): - libxc_min_version = '4.0.3' - options['DFLAGS'] += ' -D__LIBXC' - else: - libxc_min_version = '2.0.1' - options['DFLAGS'] += ' -D__LIBXC2' - - if LooseVersion(cur_libxc_version) < LooseVersion(libxc_min_version): - raise EasyBuildError("This version of CP2K is not compatible with libxc < %s" % libxc_min_version) - - if LooseVersion(cur_libxc_version) >= LooseVersion('4.0.3'): - # cfr. 
https://www.cp2k.org/howto:compile#k_libxc_optional_wider_choice_of_xc_functionals - options['LIBS'] += ' -L%s/lib -lxcf03 -lxc' % libxc - elif LooseVersion(cur_libxc_version) >= LooseVersion('2.2'): - options['LIBS'] += ' -L%s/lib -lxcf90 -lxc' % libxc - else: - options['LIBS'] += ' -L%s/lib -lxc' % libxc - self.log.info("Using Libxc-%s" % cur_libxc_version) - options['FCFLAGSOPT'] += ' -I%s/include ' %libxc - else: - self.log.info("libxc module not loaded, so building without libxc support") - - return options - - def configure_intel_based(self): - """Configure for Intel based toolchains""" - - # based on guidelines available at - # http://software.intel.com/en-us/articles/build-cp2k-using-intel-fortran-compiler-professional-edition/ - intelurl = ''.join(["http://software.intel.com/en-us/articles/", - "build-cp2k-using-intel-fortran-compiler-professional-edition/"]) - - options = self.configure_common() - - extrainc = '' - if self.modincpath: - extrainc = '-I%s' % self.modincpath - - options.update({ - # -Vaxlib : older options - 'FREE': '-fpp -free', - - # SAFE = -assume protect_parens -fp-model precise -ftz # causes problems, so don't use this - 'SAFE': '-assume protect_parens -no-unroll-aggressive', - - 'INCFLAGS': '$(DFLAGS) -I$(INTEL_INC) -I$(INTEL_INCF) %s' % extrainc, - - 'LDFLAGS': '$(INCFLAGS) ', - 'OBJECTS_ARCHITECTURE': 'machine_intel.o', - }) - - options['DFLAGS'] += ' -D__INTEL' - - options['FCFLAGSOPT'] += ' $(INCFLAGS) -heap-arrays 64' - options['FCFLAGSOPT2'] += ' $(INCFLAGS) -heap-arrays 64' - - ifortver = LooseVersion(get_software_version('ifort')) - - # -i-static has been deprecated prior to 2013, but was still usable. From 2015 it is not. - if ifortver < LooseVersion("2013"): - options['LDFLAGS'] += ' -i-static ' - else: - options['LDFLAGS'] += ' -static-intel ' - - # Otherwise it fails on linking, since there are 2 definitions of main - if LooseVersion(self.version) >= LooseVersion('4.1'): - options['LDFLAGS'] += ' -nofor-main ' - - failmsg = "CP2K won't build correctly with the Intel %%s compilers prior to %%s, see %s" % intelurl - fypp="{}tools/build_utils/fypp".format(self.cfg['start_dir']) - fyppsrc="{}src".format(self.cfg['start_dir']) - - self.make_instructions += "pao_param_linpot.o:{}/pao_param_linpot.F\n\t{} -n $< pao_param_linpot.F90\n\t$(FC) -c -I{} -Iexts/dbcsr $(FCFLAGS2) pao_param_linpot.F90\n\n".format(fyppsrc,fypp,fyppsrc) - self.make_instructions += "lri_forces.o:{}/lri_forces.F\n\t{} -n $< lri_forces.F90\n\t$(FC) -c -I{} -Iexts/dbcsr $(FCFLAGS2) lri_forces.F90\n\n".format(fyppsrc,fypp,fyppsrc) - self.make_instructions += "mp2_optimize_ri_basis.o:{}/mp2_optimize_ri_basis.F\n\t{} -n $< mp2_optimize_ri_basis.F90\n\t$(FC) -c -I{} -Iexts/dbcsr $(FCFLAGS2) mp2_optimize_ri_basis.F90\n\n".format(fyppsrc,fypp,fyppsrc) - self.make_instructions += "lri_optimize_ri_basis.o:{}/lri_optimize_ri_basis.F\n\t{} -n $< lri_optimize_ri_basis.F90\n\t$(FC) -c -I{} -Iexts/dbcsr $(FCFLAGS2) lri_optimize_ri_basis.F90\n\n".format(fyppsrc,fypp,fyppsrc) - self.make_instructions += "hfx_contraction_methods.o:{}/hfxbase/hfx_contraction_methods.F\n\t{} -n $< hfx_contraction_methods.F90\n\t$(FC) -c -I{}/hfxbase -Iexts/dbcsr $(FCFLAGS2) hfx_contraction_methods.F90\n\n".format(fyppsrc,fypp,fyppsrc) - - return options - - def configure_GCC_based(self): - """Configure for GCC based toolchains""" - options = self.configure_common() - - options.update({ - # need this to prevent "Unterminated character constant beginning" errors - 'FREE': '-ffree-form -ffree-line-length-none', - - 
'LDFLAGS': '$(FCFLAGS)', - 'OBJECTS_ARCHITECTURE': 'machine_gfortran.o', - }) - - options['DFLAGS'] += ' -D__GFORTRAN' - - options['FCFLAGSOPT'] += ' $(DFLAGS) $(CFLAGS) -fmax-stack-var-size=32768' - options['FCFLAGSOPT2'] += ' $(DFLAGS) $(CFLAGS)' - - return options - - def configure_ACML(self, options): - """Configure for AMD Math Core Library (ACML)""" - - openmp_suffix = '' - if self.openmp: - openmp_suffix = '_mp' - - options['ACML_INC'] = '%s/gfortran64%s/include' % (get_software_root('ACML'), openmp_suffix) - options['CFLAGS'] += ' -I$(ACML_INC) -I$(FFTW_INC)' - options['DFLAGS'] += ' -D__FFTACML' - - blas = os.getenv('LIBBLAS', '') - blas = blas.replace('gfortran64', 'gfortran64%s' % openmp_suffix) - options['LIBS'] += ' %s %s %s' % (self.libsmm, os.getenv('LIBSCALAPACK', ''), blas) - - return options - - def configure_BLAS_lib(self, options): - """Configure for BLAS library.""" - options['LIBS'] += ' %s %s' % (self.libsmm, os.getenv('LIBBLAS', '')) - return options - - def configure_MKL(self, options): - """Configure for Intel Math Kernel Library (MKL)""" - - options['INTEL_INC'] = '$(MKLROOT)/include' - options['DFLAGS'] += ' -D__FFTW3' - - extra = '' - if self.modincpath: - extra = '-I%s' % self.modincpath - options['CFLAGS'] += ' -I$(INTEL_INC) %s $(FPIC) $(DEBUG)' % extra - - options['LIBS'] += ' %s %s' % (self.libsmm, os.getenv('LIBSCALAPACK', '')) - - fftw_root = get_software_root('FFTW') - if fftw_root: - libfft = '-lfftw3' - if self.cfg['type'] == 'psmp': - libfft += ' -lfftw3_omp' - - options['CFLAGS'] += ' -I$(INTEL_INCF)' - options['INTEL_INCF'] = os.path.join(fftw_root, 'include') - options['LIBS'] += ' -L%s %s' % (os.path.join(fftw_root, 'lib'), libfft) - - else: - # only use Intel FFTW wrappers if FFTW is not loaded - options['CFLAGS'] += ' -I$(INTEL_INCF)' - options['DFLAGS'] += ' -D__FFTMKL' - options['INTEL_INCF'] = '$(INTEL_INC)/fftw' - options['LIBS'] = '%s %s' % (os.getenv('LIBFFT', ''), options['LIBS']) - - return options - - def configure_FFTW3(self, options): - """Configure for FFTW3""" - - options.update({ - 'FFTW_INC': os.getenv('FFT_INC_DIR', ''), # GCC - 'FFTW3INC': os.getenv('FFT_INC_DIR', ''), # Intel - 'FFTW3LIB': os.getenv('FFT_LIB_DIR', ''), # Intel - }) - - options['DFLAGS'] += ' -D__FFTW3' - if self.cfg['type'] == 'psmp': - libfft = os.getenv('LIBFFT_MT', '') - else: - libfft = os.getenv('LIBFFT', '') - options['LIBS'] += ' -L%s %s' % (os.getenv('FFT_LIB_DIR', '.'), libfft) - - return options - - def configure_LAPACK(self, options): - """Configure for LAPACK library""" - options['LIBS'] += ' %s' % os.getenv('LIBLAPACK_MT', '') - return options - - def configure_ScaLAPACK(self, options): - """Configure for ScaLAPACK library""" - - options['LIBS'] += ' %s' % os.getenv('LIBSCALAPACK', '') - - return options - - def build_step(self): - """Start the actual build - - go into makefiles dir - - patch Makefile - -build_and_install - """ - -# makefiles = os.path.join(self.cfg['start_dir'], 'makefiles') -# try: -# os.chdir(makefiles) -# except OSError, err: -# raise EasyBuildError("Can't change to makefiles dir %s: %s", makefiles, err) - -# # modify makefile for parallel build -# parallel = self.cfg['parallel'] -# if parallel: -# -# try: -# for line in fileinput.input('Makefile', inplace=1, backup='.orig.patchictce'): -# line = re.sub(r"^PMAKE\s*=.*$", "PMAKE\t= $(SMAKE) -j %s" % parallel, line) -# sys.stdout.write(line) -# except IOError, err: -# raise EasyBuildError("Can't modify/write Makefile in %s: %s", makefiles, err) -# - # update make options with 
MAKE - self.cfg.update('buildopts', 'MAKE="make -j %s"' % self.cfg['parallel']) - - # update make options with ARCH and VERSION - self.cfg.update('buildopts', 'ARCH=%s VERSION=%s' % (self.typearch, self.cfg['type'])) - - cmd = "make %s" % self.cfg['buildopts'] - - # clean first - run_cmd(cmd + " clean", log_all=True, simple=True, log_output=True) - - # build and install - if self.cfg['library']: - cmd += ' libcp2k' - run_cmd(cmd + " all", log_all=True, simple=True, log_output=True) - - def test_step(self): - """Run regression test.""" - - if self.cfg['runtest']: - - if not build_option('mpi_tests'): - self.log.info("Skipping testing of CP2K since MPI testing is disabled") - return - - if self.cfg['omp_num_threads']: - setvar('OMP_NUM_THREADS', self.cfg['omp_num_threads']) - - # change to root of build dir - try: - os.chdir(self.builddir) - except OSError as err: - raise EasyBuildError("Failed to change to %s: %s", self.builddir, err) - - # use regression test reference output if available - # try and find an unpacked directory that starts with 'LAST-' - regtest_refdir = None - for d in os.listdir(self.builddir): - if d.startswith("LAST-"): - regtest_refdir = d - break - - # location of do_regtest script - cfg_fn = "cp2k_regtest.cfg" - regtest_script = os.path.join(self.cfg['start_dir'], 'tools', 'regtesting', 'do_regtest') - regtest_cmd = "%s -nosvn -nobuild -config %s" % (regtest_script, cfg_fn) - # older version of CP2K - if not os.path.exists(regtest_script): - regtest_script = os.path.join(self.cfg['start_dir'], 'tools', 'do_regtest') - regtest_cmd = "%s -nocvs -quick -nocompile -config %s" % (regtest_script, cfg_fn) - - # patch do_regtest so that reference output is used - if regtest_refdir: - self.log.info("Using reference output available in %s" % regtest_refdir) - try: - for line in fileinput.input(regtest_script, inplace=1, backup='.orig.refout'): - line = re.sub(r"^(dir_last\s*=\${dir_base})/.*$", r"\1/%s" % regtest_refdir, line) - sys.stdout.write(line) - except IOError as err: - raise EasyBuildError("Failed to modify '%s': %s", regtest_script, err) - - else: - self.log.info("No reference output found for regression test, just continuing without it...") - - # prefer using 4 cores, since some tests require/prefer square (n^2) numbers or powers of 2 (2^n) - test_core_cnt = min(self.cfg['parallel'], 4) - if get_avail_core_count() < test_core_cnt: - raise EasyBuildError("Cannot run MPI tests as not enough cores (< %s) are available", test_core_cnt) - else: - self.log.info("Using %s cores for the MPI tests" % test_core_cnt) - - # configure regression test - cfg_txt = '\n'.join([ - 'FORT_C_NAME="%(f90)s"', - 'dir_base=%(base)s', - 'cp2k_version=%(cp2k_version)s', - 'dir_triplet=%(triplet)s', - 'export ARCH=${dir_triplet}', - 'cp2k_dir=%(cp2k_dir)s', - 'leakcheck="YES"', - 'maxtasks=%(maxtasks)s', - 'cp2k_run_prefix="%(mpicmd_prefix)s"', - ]) % { - 'f90': os.getenv('F90'), - 'base': os.path.dirname(os.path.normpath(self.cfg['start_dir'])), - 'cp2k_version': self.cfg['type'], - 'triplet': self.typearch, - 'cp2k_dir': os.path.basename(os.path.normpath(self.cfg['start_dir'])), - 'maxtasks': self.cfg['maxtasks'], - 'mpicmd_prefix': self.toolchain.mpi_cmd_for('', test_core_cnt), - } - - write_file(cfg_fn, cfg_txt) - self.log.debug("Contents of %s: %s" % (cfg_fn, cfg_txt)) - - # run regression test - (regtest_output, ec) = run_cmd(regtest_cmd, log_all=True, simple=False, log_output=True) - - if ec == 0: - self.log.info("Regression test output:\n%s" % regtest_output) - else: - raise 
EasyBuildError("Regression test failed (non-zero exit code): %s", regtest_output) - - # pattern to search for regression test summary - re_pattern = "number\s+of\s+%s\s+tests\s+(?P<cnt>[0-9]+)" - - # find total number of tests - regexp = re.compile(re_pattern % "", re.M | re.I) - res = regexp.search(regtest_output) - tot_cnt = None - if res: - tot_cnt = int(res.group('cnt')) - else: - raise EasyBuildError("Finding total number of tests in regression test summary failed") - - # function to report on regtest results - def test_report(test_result): - """Report on tests with given result.""" - - postmsg = '' - - test_result = test_result.upper() - regexp = re.compile(re_pattern % test_result, re.M | re.I) - - cnt = None - res = regexp.search(regtest_output) - if not res: - raise EasyBuildError("Finding number of %s tests in regression test summary failed", - test_result.lower()) - else: - cnt = int(res.group('cnt')) - - logmsg = "Regression test reported %s / %s %s tests" - logmsg_values = (cnt, tot_cnt, test_result.lower()) - - # failed tests indicate problem with installation - # wrong tests are only an issue when there are excessively many - if (test_result == "FAILED" and cnt > 0) or (test_result == "WRONG" and (cnt / tot_cnt) > 0.1): - if self.cfg['ignore_regtest_fails']: - self.log.warning(logmsg, *logmsg_values) - self.log.info("Ignoring failures in regression test, as requested.") - else: - raise EasyBuildError(logmsg, *logmsg_values) - elif test_result == "CORRECT" or cnt == 0: - self.log.info(logmsg, *logmsg_values) - else: - self.log.warning(logmsg, *logmsg_values) - - return postmsg - - # number of failed/wrong tests, will report error if count is positive - self.postmsg += test_report("FAILED") - self.postmsg += test_report("WRONG") - - # number of new tests, will be high if a non-suitable regtest reference was used - # will report error if count is positive (is that what we want?) - self.postmsg += test_report("NEW") - - # number of correct tests: just report - test_report("CORRECT") - - def install_step(self): - """Install built CP2K - - copy from exe to bin - - copy data dir (if exists) - - copy tests - """ - - # copy executables - exedir = os.path.join(self.cfg['start_dir'], 'exe', self.typearch) - targetdir = os.path.join(self.installdir, 'bin') - copy_dir(exedir, targetdir) - - # copy libraries and include files, not sure what is strictly required so we take everything - if self.cfg['library']: - libdir = os.path.join(self.cfg['start_dir'], 'lib', self.typearch, self.cfg['type']) - targetdir = os.path.join(self.installdir, 'lib') - copy_dir(libdir, targetdir) - # Also need to populate the include directory - targetdir = os.path.join(self.installdir, 'include') - libcp2k_header = os.path.join(self.cfg['start_dir'], 'src', 'start', 'libcp2k.h') - target_header = os.path.join(targetdir, os.path.basename(libcp2k_header)) - copy_file(libcp2k_header, target_header) - # include all .mod files for fortran users (don't know the exact list so take everything) - mod_path = os.path.join(self.cfg['start_dir'], 'obj', self.typearch, self.cfg['type']) - for mod_file in glob.glob(os.path.join(mod_path, '*.mod')): - target_mod = os.path.join(targetdir, os.path.basename(mod_file)) - copy_file(mod_file, target_mod) - - # copy data dir - datadir = os.path.join(self.cfg['start_dir'], 'data') - targetdir = os.path.join(self.installdir, 'data') - if os.path.exists(targetdir): - self.log.info("Won't copy data dir. 
Destination directory %s already exists" % targetdir) - elif os.path.exists(datadir): - copy_dir(datadir, targetdir) - else: - self.log.info("Won't copy data dir. Source directory %s does not exist" % datadir) - - # copy tests -# targetdir = os.path.join(self.installdir, 'tests') -# if os.path.exists(targetdir): -# self.log.info("Won't copy tests. Destination directory %s already exists" % targetdir) -# else: -# copy_dir(srctests, targetdir) - # tests constitute several thousand files - # rather tar them and copy to tests.tgz - srctests = os.path.join(self.cfg['start_dir'], 'tests') - tmcmd = "tar -czf {}/tests.tgz {} ".format(self.installdir,srctests); - os.system(tmcmd) - - # copy regression test results - if self.cfg['runtest']: - try: - testdir = os.path.dirname(os.path.normpath(self.cfg['start_dir'])) - for d in os.listdir(testdir): - if d.startswith('TEST-%s-%s' % (self.typearch, self.cfg['type'])): - path = os.path.join(testdir, d) - target = os.path.join(self.installdir, d) - copy_dir(path, target) - self.log.info("Regression test results dir %s copied to %s" % (d, self.installdir)) - break - except (OSError, IOError) as err: - raise EasyBuildError("Failed to copy regression test results dir: %s", err) - - def sanity_check_step(self): - """Custom sanity check for CP2K""" - - cp2k_type = self.cfg['type'] - custom_paths = { - 'files': ["bin/%s.%s" % (x, cp2k_type) for x in ["cp2k", "cp2k_shell"]] + ["tests.tgz"], - 'dirs': ["data"] - } - if self.cfg['library']: - custom_paths['files'].append(os.path.join('lib', 'libcp2k.a')) - custom_paths['files'].append(os.path.join('include', 'libcp2k.h')) - custom_paths['files'].append(os.path.join('include', 'libcp2k.mod')) - super(EB_CP2K, self).sanity_check_step(custom_paths=custom_paths) - - def make_module_extra(self): - """Set up a CP2K_DATA_DIR environment variable to find CP2K provided basis sets""" - - txt = super(EB_CP2K, self).make_module_extra() - - # also define $CP2K_DATA_DIR in module, - # even though CP2K was already configured to pick up 'data' from install dir - # this could be useful for users to access the 'data' dir in a documented way (and it doesn't hurt) - datadir = os.path.join(self.installdir, 'data') - if os.path.exists(datadir): - txt += self.module_generator.set_environment('CP2K_DATA_DIR', datadir) - - return txt diff --git a/Custom_EasyBlocks/cpmd.py b/Custom_EasyBlocks/cpmd.py deleted file mode 100644 index 8f53a899e8d44b4769632afc70274d36b3cc5301..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/cpmd.py +++ /dev/null @@ -1,306 +0,0 @@ -## -# Copyright 2016 Landcare Research NZ Ltd -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# the Hercules foundation (http://www.herculesstichting.be/in_English) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for building and installing CPMD, implemented as an easyblock - -@author: Benjamin Roberts (Landcare Research NZ Ltd) -@author: Damian Alvarez (Forschungszentrum Juelich GmbH) -""" - -from distutils.version import LooseVersion -import glob -import os -import platform -import re -import shutil -import sys - -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -import easybuild.tools.environment as env -from easybuild.tools.filetools import apply_regex_substitutions, extract_file -from easybuild.tools.modules import get_software_root, get_software_version -from easybuild.tools.run import run_cmd -from easybuild.tools.systemtools import get_os_type -import easybuild.tools.toolchain as toolchain - -class EB_CPMD(ConfigureMake): - """ - Support for building CPMD - """ - - @staticmethod - def extra_options(): - """Custom easyconfig parameters for CPMD.""" - extra_vars = { - 'base_configuration': [None, "Base configuration from which to start (file name)", CUSTOM], - } - return ConfigureMake.extra_options(extra_vars) - - def prepare_step(self): - super(EB_CPMD, self).prepare_step() - - # create install directory and make sure it does not get cleaned up again in the install step; - # the first configure iteration already puts things in place in the install directory, - # so that shouldn't get cleaned up afterwards... - self.log.info("Creating install dir %s before starting configure-build-install iterations", self.installdir) - self.make_installdir() - self.cfg['keeppreviousinstall'] = True - - def configure_step(self, cmd_prefix=''): - """ - Configure step - """ - - config_file_candidates = [] - - for confdirname in ["configure", "CONFIGURE"]: - config_file_prefix = os.path.join(self.builddir, "CPMD", confdirname) - if os.path.isdir(config_file_prefix): - break - else: - raise EasyBuildError("No directory containing configuration files. Please review source tarball contents, and amend the EasyBlock if necessary") - - # Work out a starting configuration file if one is not supplied in the easyconfig - if self.cfg['base_configuration']: - config_file_base = self.cfg['base_configuration'] - else: - os_type_mappings = { - "LINUX" : "LINUX", - "DARWIN" : "MACOSX", - } - os_type = os_type_mappings[get_os_type().upper()] - machine = "" - if os_type != "MACOSX": - machine = platform.machine().upper() - - config_file_base = os_type - if len(machine) > 0: - config_file_base += "-" + machine - - if self.toolchain.comp_family() in [toolchain.INTELCOMP]: - config_file_base += "-INTEL" - - # enable MPI support if desired - if self.toolchain.options.get('usempi', None): - config_file_base += "-MPI" - - # Note that the -FFTW and -FFTW3 options are always at the end - # of the configuration file name, so this block needs to come - # last within the "else". - # Also, only version 3 or greater of FFTW is useful for CPMD. 
- if get_software_root('imkl') or (get_software_root('FFTW') and LooseVersion(get_software_version('FFTW')) >= LooseVersion('3.0')): - config_file_base += "-FFTW" - config_file_candidates.append(config_file_base + "3") - - config_file_candidates.append(config_file_base) - - selected_base_config = None - selected_full_config = None - for cfc in config_file_candidates: - self.log.info("Trying configuration file: %s", cfc) - config_file_full = os.path.join(config_file_prefix, cfc) - if os.path.isfile(config_file_full): - selected_base_config = cfc - selected_full_config = config_file_full - self.log.info("Selected %s as base configuration file", cfc) - break - - if selected_base_config is None: - raise EasyBuildError("Base configuration file does not exist. Please edit base_configuration or review the CPMD easyblock.") - - try: - apply_regex_substitutions(selected_full_config, [ - # Better to get CC and FC from the EasyBuild environment in this instance - (r"^(\s*CC=.*)", r"#\1"), - (r"^(\s*FC=.*)", r"#\1"), - (r"^(\s*LD)=.*", r"\1='$(FC)'"), - ]) - except IOError as err: - raise EasyBuildError("Failed to patch %s: %s", selected_base_config, err) - - if self.cfg['configure_cmd_prefix']: - if cmd_prefix: - tup = (cmd_prefix, self.cfg['configure_cmd_prefix']) - self.log.debug("Specified cmd_prefix '%s' is overruled by configure_cmd_prefix '%s'" % tup) - cmd_prefix = self.cfg['configure_cmd_prefix'] - - if self.cfg['tar_config_opts']: - # setting am_cv_prog_tar_ustar avoids that configure tries to figure out - # which command should be used for tarring/untarring - # am__tar and am__untar should be set to something decent (tar should work) - tar_vars = { - 'am__tar': 'tar chf - "$$tardir"', - 'am__untar': 'tar xf -', - 'am_cv_prog_tar_ustar': 'easybuild_avoid_ustar_testing' - } - for (key, val) in tar_vars.items(): - self.cfg.update('preconfigopts', "%s='%s'" % (key, val)) - - options = [self.cfg['configopts']] - - # enable OpenMP support if desired - if self.toolchain.options.get('openmp', None) and LooseVersion(self.version) >= LooseVersion('4.0'): - options.append("-omp") - - # This "option" has to come last as it's the chief argument, coming after - # all flags and so forth. - options.append(selected_base_config) - - # I'm not sure when mkconfig.sh changed to configure.sh. Assuming 4.0 - # for the sake of the argument. 
- if LooseVersion(self.version) >= LooseVersion('4.0'): - config_exe = 'configure.sh' - else: - config_exe = 'mkconfig.sh' - options.append('-BIN={0}'.format(os.path.join(self.installdir, "bin"))) - options.append('>') - options.append(os.path.join(self.installdir, "Makefile")) - - cmd = "%(preconfigopts)s %(cmd_prefix)s./%(config_exe)s %(prefix_opt)s%(installdir)s %(configopts)s" % { - 'preconfigopts': self.cfg['preconfigopts'], - 'cmd_prefix': cmd_prefix, - 'config_exe': config_exe, - 'prefix_opt': self.cfg['prefix_opt'], - 'installdir': self.installdir, - 'configopts': ' '.join(options), - } - - (out, _) = run_cmd(cmd, log_all=True, simple=False) - - return out - - def build_step(self): - - """ - Make some changes to files in order to make the build process more EasyBuild-friendly - """ - os.chdir(self.installdir) - if LooseVersion(self.version) < LooseVersion('4.0'): - os.mkdir("bin") - # Master configure script - makefile = os.path.join(self.installdir, "Makefile") - try: - apply_regex_substitutions(makefile, [ - (r"^(\s*LFLAGS\s*=.*[^\w-])-L/usr/lib64/atlas/([^\w-].*)$", r"\1\2"), - (r"^(\s*LFLAGS\s*=.*[^\w-])-llapack([^\w-].*)$", r"\1\2"), - (r"^(\s*LFLAGS\s*=.*[^\w-])-lblas([^\w-].*)$", r"\1\2"), - (r"^(\s*LFLAGS\s*=.*[^\w-])-lfftw([^\w-].*)$", r"\1\2"), - ]) - if self.toolchain.comp_family() in [toolchain.INTELCOMP]: - preproc_flag = "-fpp" - ar_exe = "xiar -ruv" - apply_regex_substitutions(makefile, [ - (r"^(\s*AR\s*=).*", r"\1 {0}".format(ar_exe)) - ]) - if LooseVersion(self.version) < LooseVersion('4.0'): - apply_regex_substitutions(makefile, [ - (r"^(\s*CFLAGS\s*=.*[^\w-])-O2([^\w-].*)$", r"\1\2"), - (r"^(\s*CFLAGS\s*=.*[^\w-])-Wall([^\w-].*)$", r"\1\2"), - (r"^(\s*CPPFLAGS\s*=.*[^\w-])-D__PGI([^\w-].*)$", r"\1\2"), - (r"^(\s*CPPFLAGS\s*=.*[^\w-])-D__GNU([^\w-].*)$", r"\1\2"), - (r"^(\s*FFLAGS\s*=.*[^\w-])-O2([^\w-].*)$", r"\1\2"), - (r"^(\s*FFLAGS\s*=.*[^\w-])-fcray-pointer([^\w-].*)$", r"\1\2"), - ]) - - if preproc_flag is None: - preproc_flag = '' - - apply_regex_substitutions(makefile, [ - (r"^(\s*CPPFLAGS\s*=.*)", r"\1 {0}".format(os.getenv('CPPFLAGS'))), - (r"^(\s*CFLAGS\s*=.*)", r"\1 {0}".format(os.getenv('CFLAGS'))), - (r"^(\s*FFLAGS\s*=.*)", r"\1 {0}".format(os.getenv('FFLAGS'))), - (r"^(\s*LFLAGS\s*=.*)", r"\1 {0}".format(os.getenv('LDFLAGS'))), - - # Allow to define own XFLAGS - (r"# CPPFLAGS =", r"CPPFLAGS +="), - (r"# CFLAGS =", r"CFLAGS +="), - (r"# FFLAGS =", r"FFLAGS +="), - (r"# LFLAGS =", r"LFLAGS +="), - - # Add preprocessing options to FFLAGS and NOOPT_FLAG - (r"NOOPT_FLAG =", r"NOOPT_FLAG = {0}".format(preproc_flag)), - (r"FFLAGS =", r"FFLAGS = {0}".format(preproc_flag)), - - ]) - if self.toolchain.options.get('openmp', None): - apply_regex_substitutions(makefile, [ - (r"^(\s*LFLAGS\s*=.*)", r"\1 {0} {1}".format(os.getenv('LIBLAPACK_MT'), os.getenv('LIBBLAS_MT'))) - ]) - else: - apply_regex_substitutions(makefile, [ - (r"^(\s*LFLAGS\s*=.*)", r"\1 {0} {1}".format(os.getenv('LIBLAPACK'), os.getenv('LIBBLAS'))) - ]) - apply_regex_substitutions(makefile, [ - (r"^(\s*LFLAGS\s*=.*)", r"\1 {0}".format(os.getenv('LIBFFT'))), - ]) - - if get_software_root('imkl'): - if LooseVersion(self.version) < LooseVersion('4.0'): - apply_regex_substitutions(makefile, [ - (r"(\s+)-DFFT_FFTW(\s+)", r"\1-DFFT_DEFAULT -DINTEL_MKL\2"), - ]) - if LooseVersion(self.version) >= LooseVersion('4.0'): - apply_regex_substitutions(makefile, [ - (r"^(\s*CC\s*=.*)", r"#\1"), - (r"^(\s*FC\s*=.*)", r"#\1"), - ]) - except IOError as err: - raise EasyBuildError("Failed to patch %s: %s", 
makefile, err) - - super(EB_CPMD, self).build_step() - - def extract_step(self): - """ - Unpack the source files. - """ - for src in self.src: - if 'pseudo' not in src['name']: - self.log.info("Unpacking source %s" % src['name']) - srcdir = extract_file(src['path'], self.builddir, cmd=src['cmd'], extra_options=self.cfg['unpack_options']) - if srcdir: - self.src[self.src.index(src)]['finalpath'] = srcdir - else: - raise EasyBuildError("Unpacking source %s failed", src['name']) - else: - self.log.info("Not extracting %s, treating it as pseudopotentials to be installed separately" % src['name']) - - # No need for a separate install step as the software is built in situ. - # In fact, an install step throws away the entire package. - # However, we consider the pseudopotentials and extract them in lib/ - def install_step(self): - """ - Unpack the pseudopotentials if listed as source - """ - for src in self.src: - if 'pseudo' in src['name']: - self.log.info("Unpacking pseudopotentials file %s" % src['name']) - srcdir = extract_file(src['path'], '/'.join([self.installdir, 'lib']), cmd=src['cmd'], extra_options=self.cfg['unpack_options']) - if srcdir: - self.src[self.src.index(src)]['finalpath'] = srcdir - else: - raise EasyBuildError("Unpacking source %s failed", src['name']) - diff --git a/Custom_EasyBlocks/cuda.py b/Custom_EasyBlocks/cuda.py deleted file mode 100644 index 600117f3e382179aa30bd4b496b58edd4b8bdb5f..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/cuda.py +++ /dev/null @@ -1,266 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2012-2019 Cyprus Institute / CaSToRC, Uni.Lu, NTUA, Ghent University, Forschungszentrum Juelich GmbH -# Authors:: George Tsouloupas <g.tsouloupas@cyi.ac.cy>, Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste, Damian Alvarez -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-99.html -## -""" -EasyBuild support for CUDA, implemented as an easyblock - -Ref: https://speakerdeck.com/ajdecon/introduction-to-the-cuda-toolkit-for-building-applications - -@author: George Tsouloupas (Cyprus Institute) -@author: Fotis Georgatos (Uni.lu) -@author: Kenneth Hoste (Ghent University) -@author: Damian Alvarez (Forschungszentrum Juelich) -@author: Ward Poelmans (Free University of Brussels) -""" -import os -import re -import stat - -from distutils.version import LooseVersion - -from easybuild.easyblocks.generic.binary import Binary -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import adjust_permissions, patch_perl_script_autoflush -from easybuild.tools.filetools import read_file, remove_file, which, write_file -from easybuild.tools.run import run_cmd, run_cmd_qa -from easybuild.tools.systemtools import POWER, X86_64, get_cpu_architecture, get_shared_lib_ext - -# Wrapper script definition -WRAPPER_TEMPLATE = """#!/bin/sh -echo "$@" | grep -e '-ccbin' -e '--compiler-bindir' > /dev/null -if [ $? -eq 0 ]; -then - echo "ERROR: do not set -ccbin or --compiler-bindir when using the `basename $0` wrapper" -else - nvcc -ccbin=%s "$@" - exit $? -fi """ - - -class EB_CUDA(Binary): - """ - Support for installing CUDA. 
- """ - - @staticmethod - def extra_options(): - """Create a set of wrappers based on a list determined by the easyconfig file""" - extra_vars = { - 'host_compilers': [None, "Host compilers for which a wrapper will be generated", CUSTOM] - } - return Binary.extra_options(extra_vars) - - def __init__(self, *args, **kwargs): - """ Init the cuda easyblock adding a new cudaarch template var """ - myarch = get_cpu_architecture() - if myarch == X86_64: - cudaarch = '' - elif myarch == POWER: - cudaarch = '_ppc64le' - else: - raise EasyBuildError("Architecture %s is not supported for CUDA on EasyBuild", myarch) - - super(EB_CUDA, self).__init__(*args, **kwargs) - - self.cfg.template_values['cudaarch'] = cudaarch - self.cfg.generate_template_values() - - def extract_step(self): - """Extract installer to have more control, e.g. options, patching Perl scripts, etc.""" - execpath = self.src[0]['path'] - run_cmd("/bin/sh " + execpath + " --noexec --nox11 --target " + self.builddir) - self.src[0]['finalpath'] = self.builddir - - def install_step(self): - """Install CUDA using Perl install script.""" - - # define how to run the installer - # script has /usr/bin/perl hardcoded, but we want to have control over which perl is being used - if LooseVersion(self.version) <= LooseVersion("5"): - install_interpreter = "perl" - install_script = "install-linux.pl" - self.cfg.update('installopts', '--prefix=%s' % self.installdir) - elif LooseVersion(self.version) > LooseVersion("5") and LooseVersion(self.version) < LooseVersion("10.1"): - install_interpreter = "perl" - install_script = "cuda-installer.pl" - # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut - self.cfg.update('installopts', "-verbose -silent -toolkitpath=%s -toolkit" % self.installdir) - else: - install_interpreter = "" - install_script = "./cuda-installer" - # note: also including samples (via "-samplespath=%(installdir)s -samples") would require libglut - self.cfg.update('installopts', "--silent --toolkit --toolkitpath=%s --defaultroot=%s" % ( - self.installdir, self.installdir)) - - if LooseVersion("10.0") < LooseVersion(self.version) < LooseVersion("10.2") and get_cpu_architecture() == POWER: - # Workaround for - # https://devtalk.nvidia.com/default/topic/1063995/cuda-setup-and-installation/cuda-10-1-243-10-1-update-2-ppc64le-run-file-installation-issue/ - install_script = " && ".join([ - "mkdir -p %(installdir)s/targets/ppc64le-linux/include", - "([ -e %(installdir)s/include ] || ln -s targets/ppc64le-linux/include %(installdir)s/include)", - "cp -r %(builddir)s/builds/cublas/src %(installdir)s/.", - install_script - ]) % { - 'installdir': self.installdir, - 'builddir': self.builddir - } - - # Use C locale to avoid localized questions and crash on CUDA 10.1 - self.cfg.update('preinstallopts', "export LANG=C && ") - - cmd = "%(preinstallopts)s %(interpreter)s %(script)s %(installopts)s" % { - 'preinstallopts': self.cfg['preinstallopts'], - 'interpreter': install_interpreter, - 'script': install_script, - 'installopts': self.cfg['installopts'] - } - - # prepare for running install script autonomously - qanda = {} - stdqa = { - # this question is only asked if CUDA tools are already available system-wide - r"Would you like to remove all CUDA files under .*? 
(yes/no/abort): ": "no", - } - noqanda = [ - r"^Configuring", - r"Installation Complete", - r"Verifying archive integrity.*", - r"^Uncompressing NVIDIA CUDA", - r".* -> .*", - ] - - # patch install script to handle Q&A autonomously - if install_interpreter == "perl": - patch_perl_script_autoflush(os.path.join(self.builddir, install_script)) - - # make sure $DISPLAY is not defined, which may lead to (weird) problems - # this is workaround for not being able to specify --nox11 to the Perl install scripts - if 'DISPLAY' in os.environ: - os.environ.pop('DISPLAY') - - # cuda-installer creates /tmp/cuda-installer.log (ignoring TMPDIR) - # Try to remove it before running the installer. - # This will fail with a usable error if it can't be removed - # instead of segfaulting in the cuda-installer. - remove_file('/tmp/cuda-installer.log') - - # overriding maxhits default value to 1000 (seconds to wait for nothing to change in the output - # without seeing a known question) - run_cmd_qa(cmd, qanda, std_qa=stdqa, no_qa=noqanda, log_all=True, simple=True, maxhits=1000) - - # Remove the cuda-installer log file - remove_file('/tmp/cuda-installer.log') - - # check if there are patches to apply - if len(self.src) > 1: - for patch in self.src[1:]: - self.log.debug("Running patch %s", patch['name']) - run_cmd("/bin/sh " + patch['path'] + " --accept-eula --silent --installdir=" + self.installdir) - - def post_install_step(self): - """Create wrappers for the specified host compilers and generate the appropriate stub symlinks""" - def create_wrapper(wrapper_name, wrapper_comp): - """Create for a particular compiler, with a particular name""" - wrapper_f = os.path.join(self.installdir, 'bin', wrapper_name) - write_file(wrapper_f, WRAPPER_TEMPLATE % wrapper_comp) - perms = stat.S_IXUSR | stat.S_IRUSR | stat.S_IXGRP | stat.S_IRGRP | stat.S_IXOTH | stat.S_IROTH - adjust_permissions(wrapper_f, perms) - - # Prepare wrappers to handle a default host compiler other than g++ - for comp in (self.cfg['host_compilers'] or []): - create_wrapper('nvcc_%s' % comp, comp) - - ldconfig = which('ldconfig') - sbin_dirs = ['/sbin', '/usr/sbin'] - if not ldconfig: - # ldconfig is usually in /sbin or /usr/sbin - for cand_path in sbin_dirs: - if os.path.exists(os.path.join(cand_path, 'ldconfig')): - ldconfig = os.path.join(cand_path, 'ldconfig') - break - - # fail if we couldn't find ldconfig, because it's really needed - if ldconfig: - self.log.info("ldconfig found at %s", ldconfig) - else: - path = os.environ.get('PATH', '') - raise EasyBuildError("Unable to find 'ldconfig' in $PATH (%s), nor in any of %s", path, sbin_dirs) - - # Run ldconfig to create missing symlinks in the stubs directory (libcuda.so.1, etc) - cmd = ' '.join([ldconfig, '-N', os.path.join(self.installdir, 'lib64', 'stubs')]) - run_cmd(cmd) - - super(EB_CUDA, self).post_install_step() - - def sanity_check_step(self): - """Custom sanity check for CUDA.""" - - shlib_ext = get_shared_lib_ext() - - chk_libdir = ["lib64"] - - # Versions higher than 6 do not provide 32 bit libraries - if LooseVersion(self.version) < LooseVersion("6"): - chk_libdir += ["lib"] - - culibs = ["cublas", "cudart", "cufft", "curand", "cusparse"] - custom_paths = { - 'files': [os.path.join("bin", x) for x in ["fatbinary", "nvcc", "nvlink", "ptxas"]] + - [os.path.join("%s", "lib%s.%s") % (x, y, shlib_ext) for x in chk_libdir for y in culibs], - 'dirs': ["include"], - } - - if LooseVersion(self.version) < LooseVersion('7'): - custom_paths['files'].append(os.path.join('open64', 'bin', 'nvopencc')) 
- if LooseVersion(self.version) >= LooseVersion('7'): - custom_paths['files'].append(os.path.join("extras", "CUPTI", "lib64", "libcupti.%s") % shlib_ext) - custom_paths['dirs'].append(os.path.join("extras", "CUPTI", "include")) - - super(EB_CUDA, self).sanity_check_step(custom_paths=custom_paths) - - def make_module_extra(self): - """Set the install directory as CUDA_HOME, CUDA_ROOT, CUDA_PATH.""" - txt = super(EB_CUDA, self).make_module_extra() - txt += self.module_generator.set_environment('CUDA_HOME', self.installdir) - txt += self.module_generator.set_environment('CUDA_ROOT', self.installdir) - txt += self.module_generator.set_environment('CUDA_PATH', self.installdir) - self.log.debug("make_module_extra added this: %s", txt) - return txt - - def make_module_req_guess(self): - """Specify CUDA custom values for PATH etc.""" - - guesses = super(EB_CUDA, self).make_module_req_guess() - - # The dirs should be in the order ['open64/bin', 'bin'] - bin_path = [] - if LooseVersion(self.version) < LooseVersion('7'): - bin_path.append(os.path.join('open64', 'bin')) - bin_path.append('bin') - - lib_path = ['lib64'] - inc_path = ['include'] - if LooseVersion(self.version) >= LooseVersion('7'): - lib_path.append(os.path.join('extras', 'CUPTI', 'lib64')) - inc_path.append(os.path.join('extras', 'CUPTI', 'include')) - bin_path.append(os.path.join('nvvm', 'bin')) - lib_path.append(os.path.join('nvvm', 'lib64')) - inc_path.append(os.path.join('nvvm', 'include')) - - guesses.update({ - 'PATH': bin_path, - 'LD_LIBRARY_PATH': lib_path, - 'LIBRARY_PATH': ['lib64', os.path.join('lib64', 'stubs')], - 'CPATH': inc_path, - }) - - return guesses diff --git a/Custom_EasyBlocks/elpa.py b/Custom_EasyBlocks/elpa.py deleted file mode 100644 index 837178418f09f3300dfc6dda6c80caf0481d7553..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/elpa.py +++ /dev/null @@ -1,257 +0,0 @@ -## -# Copyright 2009-2020 Ghent University -# Copyright 2019 Micael Oliveira -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. 
-## -""" -EasyBuild support for building and installing ELPA, implemented as an easyblock - -@author: Micael Oliveira (MPSD-Hamburg) -@author: Kenneth Hoste (Ghent University) -@author: Damian Alvarez (Forschungszentrum Juelich GmbH) -""" -import os - -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.config import build_option -from easybuild.tools.filetools import apply_regex_substitutions -from easybuild.tools.systemtools import get_cpu_features, get_shared_lib_ext -from easybuild.tools.toolchain.compiler import OPTARCH_GENERIC -from easybuild.tools.utilities import nub - - -ELPA_CPU_FEATURE_FLAGS = ['avx', 'avx2', 'avx512f', 'vsx', 'sse4_2'] - - -class EB_ELPA(ConfigureMake): - """Support for building/installing ELPA.""" - - @staticmethod - def extra_options(): - """Custom easyconfig parameters for ELPA.""" - extra_vars = { - 'auto_detect_cpu_features': [True, "Auto-detect available CPU features, and configure accordingly", CUSTOM], - 'with_mpi': [True, "Enable building of ELPA MPI library", CUSTOM], - 'with_openmp': [True, "Enable building of ELPA OpenMP library", CUSTOM], - 'with_shared': [True, "Enable building of shared ELPA libraries", CUSTOM], - 'with_single': [True, "Enable building of single precision ELPA functions", CUSTOM], - 'with_generic_kernel': [True, "Enable building of ELPA generic kernels", CUSTOM], - } - - for flag in ELPA_CPU_FEATURE_FLAGS: - if flag == 'sse4_2': - conf_opt = ['sse', 'sse-assembly'] - elif flag == 'avx512f': - conf_opt = ['avx512'] - else: - conf_opt = [flag] - - for opt in conf_opt: - help_msg = "Configure with --enable-%s (if None, auto-detect support for %s)" % (opt, flag.upper()) - extra_vars['use_%s' % flag] = [None, help_msg, CUSTOM] - - return ConfigureMake.extra_options(extra_vars) - - def __init__(self, *args, **kwargs): - """Initialisation of custom class variables for ELPA.""" - super(EB_ELPA, self).__init__(*args, **kwargs) - - for flag in ELPA_CPU_FEATURE_FLAGS: - # fail-safe: make sure we're not overwriting an existing attribute (could lead to weird bugs if we do) - if hasattr(self, flag): - raise EasyBuildError("EasyBlock attribute '%s' already exists") - setattr(self, flag, self.cfg['use_%s' % flag]) - - # auto-detect CPU features that can be used and are not enabled/disabled explicitly, - # but only if --optarch=GENERIC is not being used - if self.cfg['auto_detect_cpu_features']: - - # if --optarch=GENERIC is used, we will not use no CPU feature - if build_option('optarch') == OPTARCH_GENERIC: - cpu_features = [] - else: - cpu_features = ELPA_CPU_FEATURE_FLAGS - self.log.info("CPU features considered for auto-detection: %s", cpu_features) - - # get list of available CPU features, so we can check which ones to retain - avail_cpu_features = get_cpu_features() - - # on macOS, AVX is indicated with 'avx1.0' rather than 'avx' - if 'avx1.0' in avail_cpu_features: - avail_cpu_features.append('avx') - - self.log.info("List of available CPU features: %s", avail_cpu_features) - - for flag in cpu_features: - # only enable use of a particular CPU feature if it's still undecided (i.e. 
None) - if getattr(self, flag) is None and flag in avail_cpu_features: - self.log.info("Enabling use of %s (should be supported based on CPU features)", flag.upper()) - setattr(self, flag, True) - - def run_all_steps(self, *args, **kwargs): - """ - Put configure options in place for different builds (serial, openmp, mpi, openmp+mpi). - """ - - # the following configopts are common to all builds - if self.toolchain.options['pic']: - self.cfg.update('configopts', '--with-pic') - - if self.cfg['with_shared']: - self.cfg.update('configopts', '--enable-shared') - - if self.cfg['with_generic_kernel']: - self.cfg.update('configopts', '--enable-generic') - - if self.cfg['with_single']: - self.cfg.update('configopts', '--enable-single-precision') - - for flag in ELPA_CPU_FEATURE_FLAGS: - # many ELPA kernels are enabled by default, even when the - # CPU does not support them, so we disable them all, except - # when the appropriate CPU flag is found - # sse kernels require sse4_2 - if flag == 'sse4_2': - if getattr(self, flag): - self.cfg.update('configopts', '--enable-sse') - self.cfg.update('configopts', '--enable-sse-assembly') - else: - self.cfg.update('configopts', '--disable-sse') - self.cfg.update('configopts', '--disable-sse-assembly') - elif flag == 'avx512f': - if getattr(self, 'avx512f'): - self.cfg.update('configopts', '--enable-avx512') - else: - self.cfg.update('configopts', '--disable-avx512') - else: - if getattr(self, flag): - self.cfg.update('configopts', '--enable-%s' % flag) - else: - self.cfg.update('configopts', '--disable-%s' % flag) - -# # By default ELPA tries to use MPI and configure fails if it's not available -# # so we turn off MPI support unless MPI support is requested via the usempi toolchain option. -# # We also set the LIBS environmet variable to detect the correct linalg library -# # depending on the MPI availability. 
-# if self.toolchain.options.get('usempi', None): -# self.cfg.update('configopts', '--with-mpi=yes') -# self.cfg.update('configopts', 'LIBS="$LIBSCALAPACK"') -# else: -# self.cfg.update('configopts', '--with-mpi=no') -# self.cfg.update('configopts', 'LIBS="$LIBLAPACK"') - - # make all builds verbose - self.cfg.update('buildopts', 'V=1') - - # keep track of common configopts specified in easyconfig file, - # so we can include them in each iteration later - common_config_opts = self.cfg['configopts'] - common_build_opts = self.cfg['buildopts'] - - self.cfg['configopts'] = [] - self.cfg['buildopts'] = [] - - with_mpi_opts = [False] - if self.cfg['with_mpi']: - with_mpi_opts.append(True) - - with_omp_opts = [False] - if self.cfg['with_openmp']: - with_omp_opts.append(True) - - for with_mpi in with_mpi_opts: - if with_mpi: - mpi_configopt = '--with-mpi=yes' - linalgopt = 'LIBS="$LIBSCALAPACK $LIBS" LD_FLAGS="$LIBSCALAPACK $LD_FLAGS"' - else: - mpi_configopt = '--with-mpi=no' - linalgopt = 'LIBS="$LIBLAPACK $LIBS" LD_FLAGS="$LIBLAPACK $LD_FLAGS"' - - for with_omp in with_omp_opts: - if with_omp: - omp_configopt = '--enable-openmp' - else: - omp_configopt = '--disable-openmp' - - # append additional configure and build options - self.cfg.update('configopts', - [mpi_configopt + ' ' + omp_configopt + ' ' + linalgopt + ' ' + common_config_opts]) - self.cfg.update('buildopts', [linalgopt + ' ' + common_build_opts]) - - self.log.debug("List of configure options to iterate over: %s", self.cfg['configopts']) - self.log.debug("List of build options to iterate over: %s", self.cfg['buildopts']) - - return super(EB_ELPA, self).run_all_steps(*args, **kwargs) - - def patch_step(self, *args, **kwargs): - """Patch manual_cpp script to avoid using hardcoded /usr/bin/python.""" - super(EB_ELPA, self).patch_step(*args, **kwargs) - - # avoid that manual_cpp script uses hardcoded /usr/bin/python - manual_cpp = 'manual_cpp' - if os.path.exists(manual_cpp): - apply_regex_substitutions(manual_cpp, [(r'^#!/usr/bin/python$', '#!/usr/bin/env python')]) - - def sanity_check_step(self): - """Custom sanity check for ELPA.""" - - custom_paths = { - 'dirs': ['lib/pkgconfig', 'bin'], - } - - shlib_ext = get_shared_lib_ext() - - extra_files = [] - - with_mpi_opts = [False] - if self.cfg['with_mpi']: - with_mpi_opts.append(True) - - with_omp_opts = [False] - if self.cfg['with_openmp']: - with_omp_opts.append(True) - - for with_mpi in with_mpi_opts: - if with_mpi: - mpi_suff = '' - else: - mpi_suff = '_onenode' - - for with_omp in with_omp_opts: - if with_omp: - omp_suff = '_openmp' - else: - omp_suff = '' - - extra_files.append('include/elpa%s%s-%s/elpa/elpa.h' % (mpi_suff, omp_suff, self.version)) - extra_files.append('include/elpa%s%s-%s/modules/elpa.mod' % (mpi_suff, omp_suff, self.version)) - - extra_files.append('lib/libelpa%s%s.a' % (mpi_suff, omp_suff)) - if self.cfg['with_shared']: - extra_files.append('lib/libelpa%s%s.%s' % (mpi_suff, omp_suff, shlib_ext)) - - custom_paths['files'] = nub(extra_files) - - super(EB_ELPA, self).sanity_check_step(custom_paths=custom_paths) diff --git a/Custom_EasyBlocks/generic/intelbase.py b/Custom_EasyBlocks/generic/intelbase.py deleted file mode 100644 index 73360a0c533032603008d6dad3887d56761c1daa..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/generic/intelbase.py +++ /dev/null @@ -1,507 +0,0 @@ -# # -# Copyright 2009-2021 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with 
support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -# # -""" -Generic EasyBuild support for installing Intel tools, implemented as an easyblock - -@author: Stijn De Weirdt (Ghent University) -@author: Dries Verdegem (Ghent University) -@author: Kenneth Hoste (Ghent University) -@author: Pieter De Baets (Ghent University) -@author: Jens Timmerman (Ghent University) -@author: Ward Poelmans (Ghent University) -@author: Lumir Jasiok (IT4Innovations) -@author: Damian Alvarez (Forschungszentrum Juelich GmbH) -""" - -import os -import re -import shutil -import stat -import tempfile -from distutils.version import LooseVersion - -import easybuild.tools.environment as env -from easybuild.framework.easyblock import EasyBlock -from easybuild.framework.easyconfig import CUSTOM -from easybuild.framework.easyconfig.types import ensure_iterable_license_specs -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import adjust_permissions, find_flexlm_license -from easybuild.tools.filetools import mkdir, read_file, remove_file, write_file -from easybuild.tools.run import run_cmd - - -# different supported activation types (cfr. Intel documentation) -ACTIVATION_EXIST_LIC = 'exist_lic' # use a license which exists on the system -ACTIVATION_LIC_FILE = 'license_file' # use a license file -ACTIVATION_LIC_SERVER = 'license_server' # use a license server -ACTIVATION_SERIAL = 'serial_number' # use a serial number -ACTIVATION_TRIAL = 'trial_lic' # use trial activation -ACTIVATION_TYPES = [ - ACTIVATION_EXIST_LIC, - ACTIVATION_LIC_FILE, - ACTIVATION_LIC_SERVER, - ACTIVATION_SERIAL, - ACTIVATION_TRIAL, -] - -# silent.cfg parameter name for type of license activation (cfr. 
options listed above) -ACTIVATION_NAME = 'ACTIVATION_TYPE' # since icc/ifort v2013_sp1, impi v4.1.1, imkl v11.1 -ACTIVATION_NAME_2012 = 'ACTIVATION' # previous activation type parameter used in older versions -# silent.cfg parameter name for install prefix -INSTALL_DIR_NAME = 'PSET_INSTALL_DIR' -# silent.cfg parameter name for install mode -INSTALL_MODE_NAME = 'PSET_MODE' -# Older (2015 and previous) silent.cfg parameter name for install mode -INSTALL_MODE_NAME_2015 = 'INSTALL_MODE' -# Install mode for 2016 version -INSTALL_MODE = 'install' -# Install mode for 2015 and older versions -INSTALL_MODE_2015 = 'NONRPM' -# silent.cfg parameter name for license file/server specification -LICENSE_FILE_NAME = 'ACTIVATION_LICENSE_FILE' # since icc/ifort v2013_sp1, impi v4.1.1, imkl v11.1 -LICENSE_FILE_NAME_2012 = 'PSET_LICENSE_FILE' # previous license file parameter used in older versions -LICENSE_SERIAL_NUMBER = 'ACTIVATION_SERIAL_NUMBER' - -COMP_ALL = 'ALL' -COMP_DEFAULTS = 'DEFAULTS' - - -class IntelBase(EasyBlock): - """ - Base class for Intel software - - no configure/make : binary release - - add license_file variable - """ - - def __init__(self, *args, **kwargs): - """Constructor, adds extra config options""" - super(IntelBase, self).__init__(*args, **kwargs) - - self.license_file = 'UNKNOWN' - self.license_env_var = 'UNKNOWN' - - # Initialise whether we need a runtime licence or not - self.requires_runtime_license = True - - self.home_subdir = os.path.join(os.getenv('HOME'), 'intel') - common_tmp_dir = os.path.dirname(tempfile.gettempdir()) # common tmp directory, same across nodes - self.home_subdir_local = os.path.join(common_tmp_dir, os.getenv('USER'), 'easybuild_intel') - - self.install_components = None - - def get_guesses_tools(self): - """Find reasonable paths for a subset of Intel tools, ignoring CPATH, LD_LIBRARY_PATH and LIBRARY_PATH""" - - guesses = super(IntelBase, self).make_module_req_guess() - - if self.cfg['m32']: - guesses['PATH'] = [os.path.join(self.subdir, 'bin32')] - else: - guesses['PATH'] = [os.path.join(self.subdir, 'bin64')] - - guesses['MANPATH'] = [os.path.join(self.subdir, 'man')] - - # make sure $CPATH, $LD_LIBRARY_PATH and $LIBRARY_PATH are not updated in generated module file, - # because that leads to problem when the libraries included with VTune/Advisor/Inspector are being picked up - for key in ['CPATH', 'LD_LIBRARY_PATH', 'LIBRARY_PATH']: - if key in guesses: - self.log.debug("Purposely not updating $%s in %s module file", key, self.name) - del guesses[key] - - return guesses - - def get_custom_paths_tools(self, binaries): - """Custom sanity check paths for certain Intel tools.""" - if self.cfg['m32']: - files = [os.path.join('bin32', b) for b in binaries] - dirs = ['lib32', 'include'] - else: - files = [os.path.join('bin64', b) for b in binaries] - dirs = ['lib64', 'include'] - - custom_paths = { - 'files': [os.path.join(self.subdir, f) for f in files], - 'dirs': [os.path.join(self.subdir, d) for d in dirs], - } - return custom_paths - - @staticmethod - def extra_options(extra_vars=None): - extra_vars = EasyBlock.extra_options(extra_vars) - extra_vars.update({ - 'license_activation': [ACTIVATION_LIC_SERVER, "License activation type", CUSTOM], - 'serial_number': [None, "Serial number for the product", CUSTOM], - 'requires_runtime_license': [True, "Boolean indicating whether or not a runtime license is required", - CUSTOM], - # 'usetmppath': - # workaround for older SL5 version (5.5 and earlier) - # used to be True, but False since SL5.6/SL6 - # disables 
TMP_PATH env and command line option - 'usetmppath': [False, "Use temporary path for installation", CUSTOM], - 'm32': [False, "Enable 32-bit toolchain", CUSTOM], - 'components': [None, "List of components to install", CUSTOM], - }) - - return extra_vars - - def parse_components_list(self): - """parse the regex in the components extra_options and select the matching components - from the mediaconfig.xml file in the install dir""" - - mediaconfigpath = os.path.join(self.cfg['start_dir'], 'pset', 'mediaconfig.xml') - if not os.path.isfile(mediaconfigpath): - raise EasyBuildError("Could not find %s to find list of components." % mediaconfigpath) - - mediaconfig = read_file(mediaconfigpath) - available_components = re.findall("<Abbr>(?P<component>[^<]+)</Abbr>", mediaconfig, re.M) - self.log.debug("Intel components found: %s" % available_components) - self.log.debug("Using regex list: %s" % self.cfg['components']) - - if COMP_ALL in self.cfg['components'] or COMP_DEFAULTS in self.cfg['components']: - if len(self.cfg['components']) == 1: - self.install_components = self.cfg['components'] - else: - raise EasyBuildError("If you specify %s as components, you cannot specify anything else: %s", - ' or '.join([COMP_ALL, COMP_DEFAULTS]), self.cfg['components']) - else: - self.install_components = [] - for comp_regex in self.cfg['components']: - comps = [comp for comp in available_components if re.match(comp_regex, comp)] - self.install_components.extend(comps) - - self.log.debug("Components to install: %s" % self.install_components) - - def clean_home_subdir(self): - """Remove contents of (local) 'intel' directory home subdir, where stuff is cached.""" - if os.path.exists(self.home_subdir_local): - self.log.debug("Cleaning up %s..." % self.home_subdir_local) - try: - for tree in os.listdir(self.home_subdir_local): - self.log.debug("... removing %s subtree" % tree) - path = os.path.join(self.home_subdir_local, tree) - if os.path.isfile(path) or os.path.islink(path): - remove_file(path) - else: - shutil.rmtree(path) - except OSError as err: - raise EasyBuildError("Cleaning up intel dir %s failed: %s", self.home_subdir_local, err) - - def setup_local_home_subdir(self): - """ - Intel script use $HOME/intel to cache stuff. - To enable parallel builds, we symlink $HOME/intel to a temporary dir on the local disk.""" - - try: - # make sure local directory exists - if not os.path.exists(self.home_subdir_local): - os.makedirs(self.home_subdir_local) - self.log.debug("Created local dir %s" % self.home_subdir_local) - - if os.path.exists(self.home_subdir): - # if 'intel' dir in $HOME already exists, make sure it's the right symlink - symlink_ok = os.path.islink(self.home_subdir) and os.path.samefile(self.home_subdir, - self.home_subdir_local) - if not symlink_ok: - # rename current 'intel' dir - home_intel_bk = tempfile.mkdtemp(dir=os.path.dirname(self.home_subdir), - prefix='%s.bk.' % os.path.basename(self.home_subdir)) - self.log.info("Moving %(ih)s to %(ihl)s, I need %(ih)s myself..." 
% {'ih': self.home_subdir, - 'ihl': home_intel_bk}) - shutil.move(self.home_subdir, home_intel_bk) - - # set symlink in place - os.symlink(self.home_subdir_local, self.home_subdir) - self.log.debug("Created symlink (1) %s to %s" % (self.home_subdir, self.home_subdir_local)) - - else: - # if a broken symlink is present, remove it first - if os.path.islink(self.home_subdir): - remove_file(self.home_subdir) - os.symlink(self.home_subdir_local, self.home_subdir) - self.log.debug("Created symlink (2) %s to %s" % (self.home_subdir, self.home_subdir_local)) - - except OSError as err: - raise EasyBuildError("Failed to symlink %s to %s: %s", self.home_subdir_local, self.home_subdir, err) - - def prepare_step(self, *args, **kwargs): - """Custom prepare step for IntelBase. Set up the license""" - requires_runtime_license = kwargs.pop('requires_runtime_license', True) - - super(IntelBase, self).prepare_step(*args, **kwargs) - - # Decide if we need a license or not (default is True because of defaults of individual Booleans) - self.requires_runtime_license = self.cfg['requires_runtime_license'] and requires_runtime_license - self.serial_number = self.cfg['serial_number'] - - if self.serial_number: - self.log.info("Using provided serial number (%s) and ignoring other licenses", self.serial_number) - elif self.requires_runtime_license: - default_lic_env_var = 'INTEL_LICENSE_FILE' - license_specs = ensure_iterable_license_specs(self.cfg['license_file']) - lic_specs, self.license_env_var = find_flexlm_license(custom_env_vars=[default_lic_env_var], - lic_specs=license_specs) - - if lic_specs: - if self.license_env_var is None: - self.log.info("Using Intel license specifications from 'license_file': %s", lic_specs) - self.license_env_var = default_lic_env_var - else: - self.log.info("Using Intel license specifications from $%s: %s", self.license_env_var, lic_specs) - - self.license_file = os.pathsep.join(lic_specs) - env.setvar(self.license_env_var, self.license_file) - - # if we have multiple retained lic specs, specify to 'use a license which exists on the system' - if len(lic_specs) > 1: - self.log.debug("More than one license specs found, using '%s' license activation instead of " - "'%s'", ACTIVATION_EXIST_LIC, self.cfg['license_activation']) - self.cfg['license_activation'] = ACTIVATION_EXIST_LIC - - # $INTEL_LICENSE_FILE should always be set during installation with existing license - env.setvar(default_lic_env_var, self.license_file) - else: - msg = "No viable license specifications found; " - msg += "specify 'license_file', or define $INTEL_LICENSE_FILE or $LM_LICENSE_FILE" - raise EasyBuildError(msg) - - def configure_step(self): - """Configure: handle license file and clean home dir.""" - - # prepare (local) 'intel' home subdir - self.setup_local_home_subdir() - self.clean_home_subdir() - - # determine list of components, based on 'components' easyconfig parameter (if specified) - if self.cfg['components']: - self.parse_components_list() - else: - self.log.debug("No components specified") - - def build_step(self): - """Binary installation files, so no building.""" - pass - - def install_step_classic(self, silent_cfg_names_map=None, silent_cfg_extras=None): - """Actual installation for versions prior to 2021.x - - - create silent cfg file - - set environment parameters - - execute command - """ - if silent_cfg_names_map is None: - silent_cfg_names_map = {} - - if self.serial_number or self.requires_runtime_license: - lic_entry = "" - if self.serial_number: - lic_entry = 
"%(license_serial_number)s=%(serial_number)s" - self.cfg['license_activation'] = ACTIVATION_SERIAL - else: - # license file entry is only applicable with license file or server type of activation - # also check whether specified activation type makes sense - lic_file_server_activations = [ACTIVATION_EXIST_LIC, ACTIVATION_LIC_FILE, ACTIVATION_LIC_SERVER] - other_activations = [act for act in ACTIVATION_TYPES if act not in lic_file_server_activations] - if self.cfg['license_activation'] in lic_file_server_activations: - lic_entry = "%(license_file_name)s=%(license_file)s" - elif not self.cfg['license_activation'] in other_activations: - raise EasyBuildError("Unknown type of activation specified: %s (known :%s)", - self.cfg['license_activation'], ACTIVATION_TYPES) - silent = '\n'.join([ - "%(activation_name)s=%(activation)s", - lic_entry, - "" # Add a newline at the end, so we can easily append if needed - ]) % { - 'activation_name': silent_cfg_names_map.get('activation_name', ACTIVATION_NAME), - 'activation': self.cfg['license_activation'], - 'license_file_name': silent_cfg_names_map.get('license_file_name', LICENSE_FILE_NAME), - 'license_file': self.license_file, - 'license_serial_number': silent_cfg_names_map.get('license_serial_number', LICENSE_SERIAL_NUMBER), - 'serial_number': self.serial_number, - } - else: - self.log.debug("No license required, so not including license specifications in silent.cfg") - silent = '' - - silent += '\n'.join([ - "%(install_dir_name)s=%(install_dir)s", - "ACCEPT_EULA=accept", - "%(install_mode_name)s=%(install_mode)s", - "CONTINUE_WITH_OPTIONAL_ERROR=yes", - "" # Add a newline at the end, so we can easily append if needed - ]) % { - 'install_dir_name': silent_cfg_names_map.get('install_dir_name', INSTALL_DIR_NAME), - 'install_dir': silent_cfg_names_map.get('install_dir', self.installdir), - 'install_mode': silent_cfg_names_map.get('install_mode', INSTALL_MODE_2015), - 'install_mode_name': silent_cfg_names_map.get('install_mode_name', INSTALL_MODE_NAME_2015), - } - - if self.install_components is not None: - if len(self.install_components) == 1 and self.install_components[0] in [COMP_ALL, COMP_DEFAULTS]: - # no quotes should be used for ALL or DEFAULTS - silent += 'COMPONENTS=%s\n' % self.install_components[0] - elif self.install_components: - # a list of components is specified (needs quotes) - components = ';'.join(self.install_components) - if LooseVersion(self.version) >= LooseVersion('2017'): - # for versions 2017.x and newer, double quotes should not be there... 
- silent += 'COMPONENTS=%s\n' % components - else: - silent += 'COMPONENTS="%s"\n' % components - else: - raise EasyBuildError("Empty list of matching components obtained via %s", self.cfg['components']) - - if silent_cfg_extras is not None: - if isinstance(silent_cfg_extras, dict): - silent += '\n'.join("%s=%s" % (key, value) for (key, value) in silent_cfg_extras.items()) - else: - raise EasyBuildError("silent_cfg_extras needs to be a dict") - - # we should be already in the correct directory - silentcfg = os.path.join(os.getcwd(), 'silent.cfg') - write_file(silentcfg, silent) - self.log.debug("Contents of %s:\n%s", silentcfg, silent) - - # workaround for mktmp: create tmp dir and use it - tmpdir = os.path.join(self.cfg['start_dir'], 'mytmpdir') - mkdir(tmpdir, parents=True) - - tmppathopt = '' - if self.cfg['usetmppath']: - env.setvar('TMP_PATH', tmpdir) - tmppathopt = "-t %s" % tmpdir - - # set some extra env variables - env.setvar('LOCAL_INSTALL_VERBOSE', '1') - env.setvar('VERBOSE_MODE', '1') - - env.setvar('INSTALL_PATH', self.installdir) - - # perform installation - cmd = ' '.join([ - self.cfg['preinstallopts'], - './install.sh', - tmppathopt, - '-s ' + silentcfg, - self.cfg['installopts'], - ]) - - return run_cmd(cmd, log_all=True, simple=True, log_output=True) - - def install_step_oneapi(self, *args, **kwargs): - """ - Actual installation for versions 2021.x onwards. - """ - # require that EULA is accepted - intel_eula_url = 'https://software.intel.com/content/www/us/en/develop/articles/end-user-license-agreement.html' - self.check_accepted_eula(name='Intel-oneAPI', more_info=intel_eula_url) - - # exactly one "source" file is expected: the (offline) installation script - if len(self.src) == 1: - install_script = self.src[0]['name'] - else: - src_fns = ', '.join([x['name'] for x in self.src]) - raise EasyBuildError("Expected to find exactly one 'source' file (installation script): %s", src_fns) - - adjust_permissions(install_script, stat.S_IXUSR) - - # see https://software.intel.com/content/www/us/en/develop/documentation/... - # .../installation-guide-for-intel-oneapi-toolkits-linux/top/... - # .../local-installer-full-package/install-with-command-line.html - cmd = [ - self.cfg['preinstallopts'], - './' + install_script, - '-a', # required to specify that following are options for installer - '--action install', - '--silent', - '--eula accept', - '--install-dir ' + self.installdir, - ] - - if self.install_components: - cmd.extend([ - '--components', - ':'.join(self.install_components), - ]) - - cmd.append(self.cfg['installopts']) - - return run_cmd(' '.join(cmd), log_all=True, simple=True, log_output=True) - - def install_step(self, *args, **kwargs): - """ - Install Intel software - """ - if LooseVersion(self.version) >= LooseVersion('2021'): - return self.install_step_oneapi(*args, **kwargs) - else: - return self.install_step_classic(*args, **kwargs) - - def move_after_install(self): - """Move installed files to correct location after installation.""" - subdir = os.path.join(self.installdir, self.name, self.version) - self.log.debug("Moving contents of %s to %s" % (subdir, self.installdir)) - try: - # remove senseless symlinks, e.g. 
impi_5.0.1 and impi_latest - majver = '.'.join(self.version.split('.')[:-1]) - for symlink in ['%s_%s' % (self.name, majver), '%s_latest' % self.name]: - symlink_fp = os.path.join(self.installdir, symlink) - if os.path.exists(symlink_fp): - remove_file(symlink_fp) - # move contents of 'impi/<version>' dir to installdir - for fil in os.listdir(subdir): - source = os.path.join(subdir, fil) - target = os.path.join(self.installdir, fil) - self.log.debug("Moving %s to %s" % (source, target)) - shutil.move(source, target) - shutil.rmtree(os.path.join(self.installdir, self.name)) - except OSError as err: - raise EasyBuildError("Failed to move contents of %s to %s: %s", subdir, self.installdir, err) - - def sanity_check_rpath(self): - """Skip the rpath sanity check, this is binary software""" - self.log.info("RPATH sanity check is skipped when using %s easyblock (derived from IntelBase)", - self.__class__.__name__) - - def make_module_extra(self, *args, **kwargs): - """Custom variable definitions in module file.""" - txt = super(IntelBase, self).make_module_extra(*args, **kwargs) - - if self.requires_runtime_license: - txt += self.module_generator.prepend_paths(self.license_env_var, [self.license_file], - allow_abs=True, expand_relpaths=False) - - return txt - - def cleanup_step(self): - """Cleanup leftover mess - - - clean home dir - - generic cleanup (get rid of build dir) - """ - self.clean_home_subdir() - - super(IntelBase, self).cleanup_step() - - # no default sanity check, needs to be implemented by derived class diff --git a/Custom_EasyBlocks/gromacs.py b/Custom_EasyBlocks/gromacs.py deleted file mode 100644 index a924a507a7437902c5869a6e2a8a3d96a40e8f29..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/gromacs.py +++ /dev/null @@ -1,693 +0,0 @@ -## -# Copyright 2013-2020 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. 
-## -""" -EasyBuild support for building and installing GROMACS, implemented as an easyblock - -@author: Kenneth Hoste (Ghent University) -@author: Ward Poelmans (Ghent University) -@author: Benjamin Roberts (The University of Auckland) -@author: Luca Marsella (CSCS) -@author: Guilherme Peretti-Pezzi (CSCS) -@author: Oliver Stueker (Compute Canada/ACENET) -@author: Davide Vanzo (Vanderbilt University) -""" -import glob -import os -import re -import shutil -from distutils.version import LooseVersion - -import easybuild.tools.environment as env -import easybuild.tools.toolchain as toolchain -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.easyblocks.generic.cmakemake import CMakeMake -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError, print_warning -from easybuild.tools.config import build_option -from easybuild.tools.filetools import which -from easybuild.tools.modules import get_software_libdir, get_software_root, get_software_version -from easybuild.tools.run import run_cmd -from easybuild.tools.toolchain.compiler import OPTARCH_GENERIC -from easybuild.tools.systemtools import X86_64, get_cpu_architecture, get_shared_lib_ext, get_cpu_features - - -class EB_GROMACS(CMakeMake): - """Support for building/installing GROMACS.""" - - @staticmethod - def extra_options(): - extra_vars = CMakeMake.extra_options() - extra_vars.update({ - 'double_precision': [None, "Build with double precision enabled (-DGMX_DOUBLE=ON), " + - "default is to build double precision unless CUDA is enabled", CUSTOM], - 'mpisuffix': ['_mpi', "Suffix to append to MPI-enabled executables (only for GROMACS < 4.6)", CUSTOM], - 'mpiexec': ['mpirun', "MPI executable to use when running tests", CUSTOM], - 'mpiexec_numproc_flag': ['-np', "Flag to introduce the number of MPI tasks when running tests", CUSTOM], - 'mpi_numprocs': [0, "Number of MPI tasks to use when running tests", CUSTOM], - }) - extra_vars['separate_build_dir'][0] = True - return extra_vars - - def __init__(self, *args, **kwargs): - """Initialize GROMACS-specific variables.""" - super(EB_GROMACS, self).__init__(*args, **kwargs) - self.lib_subdir = '' - self.pre_env = '' - self.cfg['build_shared_libs'] = self.cfg.get('build_shared_libs', False) - self.log.info("Using modified easyblock!") - - def get_gromacs_arch(self): - """Determine value of GMX_SIMD CMake flag based on optarch string. 
- - Refs: - [0] http://manual.gromacs.org/documentation/2016.3/install-guide/index.html#typical-installation - [1] http://manual.gromacs.org/documentation/2016.3/install-guide/index.html#simd-support - [2] http://www.gromacs.org/Documentation/Acceleration_and_parallelization - """ - # default: fall back on autodetection - res = None - - optarch = build_option('optarch') or '' - # take into account that optarch value is a dictionary if it is specified by compiler family - if isinstance(optarch, dict): - comp_fam = self.toolchain.comp_family() - optarch = optarch.get(comp_fam, '') - optarch = optarch.upper() - - # The list of GMX_SIMD options can be found - # http://manual.gromacs.org/documentation/2018/install-guide/index.html#simd-support - if 'MIC-AVX512' in optarch and LooseVersion(self.version) >= LooseVersion('2016'): - res = 'AVX_512_KNL' - elif 'AVX512' in optarch and LooseVersion(self.version) >= LooseVersion('2016'): - res = 'AVX_512' - elif 'AVX2' in optarch and LooseVersion(self.version) >= LooseVersion('5.0'): - res = 'AVX2_256' - elif 'AVX' in optarch: - res = 'AVX_256' - elif 'SSE3' in optarch or 'SSE2' in optarch or 'MARCH=NOCONA' in optarch: - # Gromacs doesn't have any GMX_SIMD=SSE3 but only SSE2 and SSE4.1 [1]. - # According to [2] the performance difference between SSE2 and SSE4.1 is minor on x86 - # and SSE4.1 is not supported by AMD Magny-Cours[1]. - res = 'SSE2' - elif optarch == OPTARCH_GENERIC: - cpu_arch = get_cpu_architecture() - if cpu_arch == X86_64: - res = 'SSE2' - else: - res = 'None' - elif optarch: - warn_msg = "--optarch configuration setting set to %s but not taken into account; " % optarch - warn_msg += "compiling GROMACS for the current host architecture (i.e. the default behavior)" - self.log.warning(warn_msg) - print_warning(warn_msg) - - if res: - self.log.info("Target architecture based on optarch configuration option ('%s'): %s", optarch, res) - else: - self.log.info("No target architecture specified based on optarch configuration option ('%s')", optarch) - - return res - - def is_double_precision_cuda_build(self): - """Check if the current build step involves double precision and CUDA""" - cuda = get_software_root('CUDA') - return cuda and self.double_prec_pattern in self.cfg['configopts'] - - def prepare_step(self, *args, **kwargs): - """Custom prepare step for GROMACS.""" - - # With the intel toolchain the -ftz build flag is automatically added, causing - # denormal results being flushed to zero. This will cause errors for very small - # arguments without FMA support since some intermediate results might be denormal. - # [https://redmine.gromacs.org/issues/2335] - # Set -fp-model precise on non-FMA CPUs to produce correct results. - if self.toolchain.comp_family() == toolchain.INTELCOMP: - cpu_features = get_cpu_features() - if 'fma' not in cpu_features: - self.log.info("FMA instruction not supported by this CPU: %s", cpu_features) - self.log.info("Setting precise=True intel toolchain option to remove -ftz build flag") - self.toolchain.options['precise'] = True - - # This must be called after enforcing the precise option otherwise the - # change will be ignored. 
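        # Note (assumption based on EasyBuild's Intel toolchain options): enabling 'precise'
        # makes the toolchain compile with '-fp-model precise' instead of the default flags
        # that imply '-ftz'; the compiler flags are generated by the parent prepare_step,
        # which is why the option has to be set before calling it below.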
- super(EB_GROMACS, self).prepare_step(*args, **kwargs) - - def configure_step(self): - """Custom configuration procedure for GROMACS: set configure options for configure or cmake.""" - - if LooseVersion(self.version) >= LooseVersion('4.6'): - cuda = get_software_root('CUDA') - if cuda: - # CUDA with double precision is currently not supported in GROMACS yet - # If easyconfig explicitly have double_precision=True error out, - # otherwise warn about it and skip the double precision build - if self.cfg.get('double_precision'): - raise EasyBuildError("Double precision is not available for GPU build. " + - "Please explicitly set \"double_precision = False\" " + - "or remove it in the easyconfig file.") - if self.double_prec_pattern in self.cfg['configopts']: - if self.cfg.get('double_precision') is None: - # Only print warning once when trying double precision - # build the first time - self.cfg['double_precision'] = False - self.log.info("Double precision is not available for " + - "GPU build. Skipping the double precision build.") - - self.log.info("skipping configure step") - return - - self.cfg.update('configopts', "-DGMX_GPU=ON -DCUDA_TOOLKIT_ROOT_DIR=%s" % cuda) - else: - # explicitly disable GPU support if CUDA is not available, - # to avoid that GROMACS find and uses a system-wide CUDA compiler - self.cfg.update('configopts', "-DGMX_GPU=OFF") - - # check whether PLUMED is loaded as a dependency - plumed_root = get_software_root('PLUMED') - if plumed_root: - # Need to check if PLUMED has an engine for this version - engine = 'gromacs-%s' % self.version - - (out, _) = run_cmd("plumed-patch -l", log_all=True, simple=False) - if not re.search(engine, out): - raise EasyBuildError("There is no support in PLUMED version %s for GROMACS %s: %s", - get_software_version('PLUMED'), self.version, out) - - # PLUMED patching must be done at different stages depending on - # version of GROMACS. Just prepare first part of cmd here - plumed_cmd = "plumed-patch -p -e %s" % engine - - if LooseVersion(self.version) < LooseVersion('4.6'): - self.log.info("Using configure script for configuring GROMACS build.") - - if self.cfg['build_shared_libs']: - self.cfg.update('configopts', "--enable-shared --disable-static") - else: - self.cfg.update('configopts', "--enable-static") - - # Use external BLAS and LAPACK - self.cfg.update('configopts', "--with-external-blas --with-external-lapack") - env.setvar('LIBS', "%s %s" % (os.environ['LIBLAPACK'], os.environ['LIBS'])) - - # Don't use the X window system - self.cfg.update('configopts', "--without-x") - - # OpenMP is not supported for versions older than 4.5. - if LooseVersion(self.version) >= LooseVersion('4.5'): - # enable OpenMP support if desired - if self.toolchain.options.get('openmp', None): - self.cfg.update('configopts', "--enable-threads") - else: - self.cfg.update('configopts', "--disable-threads") - elif self.toolchain.options.get('openmp', None): - raise EasyBuildError("GROMACS version %s does not support OpenMP" % self.version) - - # GSL support - if get_software_root('GSL'): - self.cfg.update('configopts', "--with-gsl") - else: - self.cfg.update('configopts', "--without-gsl") - - # actually run configure via ancestor (not direct parent) - self.cfg['configure_cmd'] = "./configure" - ConfigureMake.configure_step(self) - - # Now patch GROMACS for PLUMED between configure and build - if plumed_root: - run_cmd(plumed_cmd, log_all=True, simple=True) - - else: - # WARNING This seems ill defined. 
`self.cfg.get('mpi_numprocs') seems to return an integer - # but it is compared with a string returned by `self.cfg['parallel']`. - # I'm going to convert `self.cfg['parallel']` to an integer for the comparison, but it may - # be better to use `self.cfg.get` instead. - if '-DGMX_MPI=ON' in self.cfg['configopts']: - mpi_numprocs = int(self.cfg.get('mpi_numprocs', 0)) - if mpi_numprocs == 0: - self.log.info("No number of test MPI tasks specified -- using default: %s", - self.cfg['parallel']) - mpi_numprocs = int(self.cfg['parallel']) - - elif mpi_numprocs > int(self.cfg['parallel']): - self.log.warning("Number of test MPI tasks (%s) is greater than value for 'parallel': %s", - mpi_numprocs, self.cfg['parallel']) - - mpiexec = self.cfg.get('mpiexec') - if mpiexec: - mpiexec_path = which(mpiexec) - if mpiexec_path: - self.cfg.update('configopts', "-DMPIEXEC=%s" % mpiexec_path) - self.cfg.update('configopts', "-DMPIEXEC_NUMPROC_FLAG=%s" % - self.cfg.get('mpiexec_numproc_flag')) - self.cfg.update('configopts', "-DNUMPROC=%s" % mpi_numprocs) - elif self.cfg['runtest']: - raise EasyBuildError("'%s' not found in $PATH", mpiexec) - else: - raise EasyBuildError("No value found for 'mpiexec'") - self.log.info("Using %s as MPI executable when testing, with numprocs flag '%s' and %s tasks", - mpiexec_path, self.cfg.get('mpiexec_numproc_flag'), - mpi_numprocs) - - if LooseVersion(self.version) >= LooseVersion('2019'): - # Building the gmxapi interface requires shared libraries - self.cfg['build_shared_libs'] = True - self.cfg.update('configopts', "-DGMXAPI=ON") - - if LooseVersion(self.version) >= LooseVersion('2020'): - # build Python bindings if Python is loaded as a dependency - python_root = get_software_root('Python') - if python_root: - bin_python = os.path.join(python_root, 'bin', 'python') - self.cfg.update('configopts', "-DPYTHON_EXECUTABLE=%s" % bin_python) - self.cfg.update('configopts', "-DGMX_PYTHON_PACKAGE=ON") - - # Now patch GROMACS for PLUMED before cmake - if plumed_root: - if LooseVersion(self.version) >= LooseVersion('5.1'): - # Use shared or static patch depending on - # setting of self.cfg['build_shared_libs'] - # and adapt cmake flags accordingly as per instructions - # from "plumed patch -i" - if self.cfg['build_shared_libs']: - mode = 'shared' - else: - mode = 'static' - plumed_cmd = plumed_cmd + ' -m %s' % mode - - run_cmd(plumed_cmd, log_all=True, simple=True) - - # prefer static libraries, if available - if self.cfg['build_shared_libs']: - self.cfg.update('configopts', "-DGMX_PREFER_STATIC_LIBS=OFF") - else: - self.cfg.update('configopts', "-DGMX_PREFER_STATIC_LIBS=ON") - - # always specify to use external BLAS/LAPACK - self.cfg.update('configopts', "-DGMX_EXTERNAL_BLAS=ON -DGMX_EXTERNAL_LAPACK=ON") - - # disable GUI tools - self.cfg.update('configopts', "-DGMX_X11=OFF") - - # convince to build for an older architecture than present on the build node by setting GMX_SIMD CMake flag - # it does not make sense for Cray, because OPTARCH is defined by the Cray Toolchain - if self.toolchain.toolchain_family() != toolchain.CRAYPE: - gmx_simd = self.get_gromacs_arch() - if gmx_simd: - if LooseVersion(self.version) < LooseVersion('5.0'): - self.cfg.update('configopts', "-DGMX_CPU_ACCELERATION=%s" % gmx_simd) - else: - self.cfg.update('configopts', "-DGMX_SIMD=%s" % gmx_simd) - - # set regression test path - prefix = 'regressiontests' - if any([src['name'].startswith(prefix) for src in self.src]): - major_minor_version = '.'.join(self.version.split('.')[:2]) - 
self.cfg.update('configopts', "-DREGRESSIONTEST_PATH='%%(builddir)s/%s-%%(version)s' " % prefix) - - # enable OpenMP support if desired - if self.toolchain.options.get('openmp', None): - self.cfg.update('configopts', "-DGMX_OPENMP=ON") - else: - self.cfg.update('configopts', "-DGMX_OPENMP=OFF") - - if get_software_root('imkl'): - # using MKL for FFT, so it will also be used for BLAS/LAPACK - self.cfg.update('configopts', '-DGMX_FFT_LIBRARY=mkl -DMKL_INCLUDE_DIR="$EBROOTIMKL/mkl/include" ') - # don't touch MKL_LIBRARIES if set by easyconfig - if not "-DMKL_LIBRARIES" in self.cfg['configopts']: - libs = os.getenv('LAPACK_STATIC_LIBS').split(',') - mkl_libs = [os.path.join(os.getenv('LAPACK_LIB_DIR'), lib) for lib in libs if lib != 'libgfortran.a'] - mkl_libs = ['-Wl,--start-group'] + mkl_libs + ['-Wl,--end-group'] - self.cfg.update('configopts', '-DMKL_LIBRARIES="%s" ' % ';'.join(mkl_libs)) - else: -# TODO: Allow other BLAS libraries, for example, BLIS. - for libname in ['BLAS', 'LAPACK']: - libdir = os.getenv('%s_LIB_DIR' % libname) - if self.toolchain.toolchain_family() == toolchain.CRAYPE: - libsci_mpi_mp_lib = glob.glob(os.path.join(libdir, 'libsci_*_mpi_mp.a')) - if libsci_mpi_mp_lib: - self.cfg.update('configopts', '-DGMX_%s_USER=%s' % (libname, libsci_mpi_mp_lib[0])) - else: - raise EasyBuildError("Failed to find libsci library to link with for %s", libname) - else: -# Q: Where do these environment variables come from? - # -DGMX_BLAS_USER & -DGMX_LAPACK_USER require full path to library - libs = os.getenv('%s_STATIC_LIBS' % libname).split(',') - libpaths = [os.path.join(libdir, lib) for lib in libs if lib != 'libgfortran.a'] - self.cfg.update('configopts', '-DGMX_%s_USER="%s"' % (libname, ';'.join(libpaths))) - # if libgfortran.a is listed, make sure it gets linked in too to avoiding linking issues - if 'libgfortran.a' in libs: - env.setvar('LDFLAGS', "%s -lgfortran -lm" % os.environ.get('LDFLAGS', '')) - - # no more GSL support in GROMACS 5.x, see http://redmine.gromacs.org/issues/1472 - if LooseVersion(self.version) < LooseVersion('5.0'): - # enable GSL when it's provided - if get_software_root('GSL'): - self.cfg.update('configopts', "-DGMX_GSL=ON") - else: - self.cfg.update('configopts', "-DGMX_GSL=OFF") - - # include flags for linking to zlib/XZ in $LDFLAGS if they're listed as a dep; - # this is important for the tests, to correctly link against libxml2 - for dep, link_flag in [('XZ', '-llzma'), ('zlib', '-lz')]: - root = get_software_root(dep) - if root: - libdir = get_software_libdir(dep) - ldflags = os.environ.get('LDFLAGS', '') - env.setvar('LDFLAGS', "%s -L%s %s" % (ldflags, os.path.join(root, libdir), link_flag)) - - # complete configuration with configure_method of parent - out = super(EB_GROMACS, self).configure_step() - - # for recent GROMACS versions, make very sure that a decent BLAS, LAPACK and FFT is found and used -# NOTE: According to the GROMACS installation guide, BLAS doesn't really matter much, so why insist? -# I don't see that much of a difference between the internal FFTW and our build, so why insist? 
-# TODO: Check the previous statement at scale - if LooseVersion(self.version) >= LooseVersion('4.6.5'): - patterns = [ - r"Using external FFT library - \S*", - r"Looking for dgemm_ - found", - r"Looking for cheev_ - found", - ] - for pattern in patterns: - regex = re.compile(pattern, re.M) - if not regex.search(out): - raise EasyBuildError("Pattern '%s' not found in GROMACS configuration output.", pattern) - - def build_step(self): - """ - Custom build step for GROMACS; Skip if CUDA is enabled and the current - iteration is for double precision - """ - - if self.is_double_precision_cuda_build(): - self.log.info("skipping build step") - else: - super(EB_GROMACS, self).build_step() - - def test_step(self): - """Run the basic tests (but not necessarily the full regression tests) using make check""" - - if self.is_double_precision_cuda_build(): - self.log.info("skipping test step") - else: - # allow to escape testing by setting runtest to False - if self.cfg['runtest'] is None or self.cfg['runtest']: - - orig_runtest = self.cfg['runtest'] - # make very sure OMP_NUM_THREADS is set to 1, to avoid hanging GROMACS regression test - env.setvar('OMP_NUM_THREADS', '1') - - if self.cfg['runtest'] is None or isinstance(self.cfg['runtest'], bool): - self.cfg['runtest'] = 'check' - - # run 'make check' or whatever the easyconfig specifies - # in parallel since it involves more compilation - self.cfg.update('runtest', "-j %s" % self.cfg['parallel']) - super(EB_GROMACS, self).test_step() - - self.cfg['runtest'] = orig_runtest - - def install_step(self): - """ - Custom install step for GROMACS; figure out where libraries were installed to. - """ - # Skipping if CUDA is enabled and the current iteration is double precision - if self.is_double_precision_cuda_build(): - self.log.info("skipping install step") - else: - # run 'make install' in parallel since it involves more compilation - self.cfg.update('installopts', "-j %s" % self.cfg['parallel']) - - super(EB_GROMACS, self).install_step() - - # the GROMACS libraries get installed in different locations (deeper subdirectory), - # depending on the platform; - # this is determined by the GNUInstallDirs CMake module; - # rather than trying to replicate the logic, we just figure out where the library was placed - - if self.cfg['build_shared_libs']: - self.libext = get_shared_lib_ext() - else: - self.libext = 'a' - - if LooseVersion(self.version) < LooseVersion('5.0'): - libname = 'libgmx*.%s' % self.libext - else: - libname = 'libgromacs*.%s' % self.libext - - for libdir in ['lib', 'lib64']: - if os.path.exists(os.path.join(self.installdir, libdir)): - for subdir in [libdir, os.path.join(libdir, '*')]: - libpaths = glob.glob(os.path.join(self.installdir, subdir, libname)) - if libpaths: - self.lib_subdir = os.path.dirname(libpaths[0])[len(self.installdir)+1:] - self.log.info("Found lib subdirectory that contains %s: %s", libname, self.lib_subdir) - break - if not self.lib_subdir: - raise EasyBuildError("Failed to determine lib subdirectory in %s", self.installdir) - - # Reset installopts etc for the benefit of the gmxapi extension - self.cfg['installopts'] = self.orig_installopts - - def extensions_step(self, fetch=False): - """ Custom extensions step, only handle extensions after the last iteration round""" - if self.iter_idx < self.variants_to_build - 1: - self.log.info("skipping extension step %s", self.iter_idx) - else: - # Set runtest to None so that the gmxapi extension doesn't try to - # run "check" as a command - orig_runtest = self.cfg['runtest'] - 
self.cfg['runtest'] = None - super(EB_GROMACS, self).extensions_step(fetch) - self.cfg['runtest'] = orig_runtest - - def make_module_req_guess(self): - """Custom library subdirectories for GROMACS.""" - guesses = super(EB_GROMACS, self).make_module_req_guess() - guesses.update({ - 'LD_LIBRARY_PATH': [self.lib_subdir], - 'LIBRARY_PATH': [self.lib_subdir], - 'PKG_CONFIG_PATH': [os.path.join(self.lib_subdir, 'pkgconfig')], - }) - return guesses - - def sanity_check_step(self): - """Custom sanity check for GROMACS.""" - - dirs = [os.path.join('include', 'gromacs')] - - # in GROMACS v5.1, only 'gmx' binary is there - # (only) in GROMACS v5.0, other binaries are symlinks to 'gmx' - # bins/libs that never have an _mpi suffix - bins = [] - libnames = [] - # bins/libs that may have an _mpi suffix - mpi_bins = [] - mpi_libnames = [] - if LooseVersion(self.version) < LooseVersion('5.1'): - mpi_bins.extend(['mdrun']) - - if LooseVersion(self.version) >= LooseVersion('5.0'): - mpi_bins.append('gmx') - mpi_libnames.append('gromacs') - else: - bins.extend(['editconf', 'g_lie', 'genbox', 'genconf']) - libnames.extend(['gmxana']) - if LooseVersion(self.version) >= LooseVersion('4.6'): - if self.cfg['build_shared_libs']: - mpi_libnames.extend(['gmx', 'md']) - else: - libnames.extend(['gmx', 'md']) - else: - mpi_libnames.extend(['gmx', 'md']) - - if LooseVersion(self.version) >= LooseVersion('4.5'): - if LooseVersion(self.version) >= LooseVersion('4.6'): - if self.cfg['build_shared_libs']: - mpi_libnames.append('gmxpreprocess') - else: - libnames.append('gmxpreprocess') - else: - mpi_libnames.append('gmxpreprocess') - - # also check for MPI-specific binaries/libraries - if self.toolchain.options.get('usempi', None): - if LooseVersion(self.version) < LooseVersion('4.6'): - mpisuff = self.cfg.get('mpisuffix', '_mpi') - else: - mpisuff = '_mpi' - - mpi_bins.extend([binary + mpisuff for binary in mpi_bins]) - mpi_libnames.extend([libname + mpisuff for libname in mpi_libnames]) - - suffixes = [''] - - # make sure that configopts is a list: - configopts_list = self.cfg['configopts'] - if isinstance(configopts_list, str): - configopts_list = [configopts_list] - - lib_files = [] - bin_files = [] - - dsuff = None - if not get_software_root('CUDA'): - for configopts in configopts_list: - # add the _d suffix to the suffix, in case of double precision - if self.double_prec_pattern in configopts: - dsuff = '_d' - - if dsuff: - suffixes.extend([dsuff]) - - lib_files.extend([ - 'lib%s%s.%s' % (x, suff, self.libext) for x in libnames + mpi_libnames for suff in suffixes - ]) - bin_files.extend([b + suff for b in bins + mpi_bins for suff in suffixes]) - - # pkgconfig dir not available for earlier versions, exact version to use here is unclear - if LooseVersion(self.version) >= LooseVersion('4.6'): - dirs.append(os.path.join(self.lib_subdir, 'pkgconfig')) - - custom_paths = { - 'files': [os.path.join('bin', b) for b in bin_files] + - [os.path.join(self.lib_subdir, l) for l in lib_files], - 'dirs': dirs, - } - super(EB_GROMACS, self).sanity_check_step(custom_paths=custom_paths) - - def run_all_steps(self, *args, **kwargs): - """ - Put configure options in place for different variants, (no)mpi, single/double precision. - """ - # Save installopts so we can reset it later. The gmxapi pip install - # can't handle the -j argument. 
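        # (see install_step/extensions_step above: install_step appends "-j <parallel>" to
        # installopts and restores self.orig_installopts afterwards, so the pip-based
        # gmxapi extension never sees the make-style -j flag)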
- self.orig_installopts = self.cfg['installopts'] - - # keep track of config/build/installopts specified in easyconfig - # file, so we can include them in each iteration later - common_config_opts = self.cfg['configopts'] - common_build_opts = self.cfg['buildopts'] - common_install_opts = self.cfg['installopts'] - - self.orig_install_cmd = self.cfg['install_cmd'] - self.orig_build_cmd = self.cfg['build_cmd'] - - self.cfg['configopts'] = [] - self.cfg['buildopts'] = [] - self.cfg['installopts'] = [] - - if LooseVersion(self.version) < LooseVersion('4.6'): - prec_opts = { - 'single': '--disable-double', - 'double': '--enable-double', - } - mpi_type_opts = { - 'nompi': '--disable-mpi', - 'mpi': '--enable-mpi' - } - else: - prec_opts = { - 'single': '-DGMX_DOUBLE=OFF', - 'double': '-DGMX_DOUBLE=ON', - } - mpi_type_opts = { - 'nompi': '-DGMX_MPI=OFF -DGMX_THREAD_MPI=ON', - 'mpi': '-DGMX_MPI=ON -DGMX_THREAD_MPI=OFF' - } - - # Double precision pattern so search for in configopts - self.double_prec_pattern = prec_opts['double'] - - # For older versions we only build/install the mdrun part for - # the MPI variant. So we need to be able to specify the - # install target depending on variant. - self.cfg['install_cmd'] = 'make' - if LooseVersion(self.version) < LooseVersion('5'): - # Use the fact that for older versions we just need to - # build and install mdrun for the MPI part - build_opts = { - 'nompi': '', - 'mpi': 'mdrun' - } - install_opts = { - 'nompi': 'install', - 'mpi': 'install-mdrun' - } - else: - build_opts = { - 'nompi': '', - 'mpi': '' - } - install_opts = { - 'nompi': 'install', - 'mpi': 'install' - } - - precisions = ['single'] - if self.cfg.get('double_precision') is None or self.cfg.get('double_precision'): - precisions.append('double') - - mpitypes = ['nompi'] - if self.toolchain.options.get('usempi', None): - mpitypes.append('mpi') - - # We need to count the number of variations to build. 
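-        # (the total ends up in self.variants_to_build, which extensions_step uses to act only on the final iteration)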
- versions_built = [] - # Handle the different variants - for precision in precisions: - for mpitype in mpitypes: - versions_built.append('%s precision %s' % (precision, mpitype)) - var_confopts = [] - var_buildopts = [] - var_installopts = [] - - var_confopts.append(mpi_type_opts[mpitype]) - var_confopts.append(prec_opts[precision]) - if LooseVersion(self.version) < LooseVersion('4.6'): - suffix = '' - if mpitype == 'mpi': - suffix = "--program-suffix={0}".format(self.cfg.get('mpisuffix', '_mpi')) - if precision == 'double': - suffix += '_d' - var_confopts.append(suffix) - - var_buildopts.append(build_opts[mpitype]) - var_installopts.append(install_opts[mpitype]) - - self.cfg.update('configopts', ' '.join(var_confopts + [common_config_opts])) - self.cfg.update('buildopts', ' '.join(var_buildopts + [common_build_opts])) - self.cfg.update('installopts', ' '.join(var_installopts + [common_install_opts])) - self.variants_to_build = len(self.cfg['configopts']) - - self.log.debug("List of configure options to iterate over: %s", self.cfg['configopts']) - self.log.info("Building these variants of GROMACS: %s", ', '.join(versions_built)) - return super(EB_GROMACS, self).run_all_steps(*args, **kwargs) - - self.cfg['install_cmd'] = self.orig_install_cmd - self.cfg['build_cmd'] = self.orig_build_cmd - - self.log.info("A full regression test suite is available from the GROMACS web site: %s", self.cfg['homepage']) diff --git a/Custom_EasyBlocks/hypre.py b/Custom_EasyBlocks/hypre.py deleted file mode 100644 index 495ad6866d276056102ee49eb62a704f34de1e2a..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/hypre.py +++ /dev/null @@ -1,119 +0,0 @@ -## -# Copyright 2009-2021 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. 
-## -""" -EasyBuild support for Hypre, implemented as an easyblock - -@author: Kenneth Hoste (Ghent University) -@author: Mikael OEhman (Chalmers University of Technology) -@author: Alex Domingo (Vrije Universiteit Brussel) -@author: Simon Branford (University of Birmingham) -""" -import os - -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.config import build_option -from easybuild.tools.modules import get_software_root -from easybuild.tools.systemtools import get_shared_lib_ext - - -class EB_Hypre(ConfigureMake): - """Support for building Hypre.""" - - @staticmethod - def extra_options(): - extra_vars = { - 'withcuda': [False, "Build with CUDA support", CUSTOM], - } - return ConfigureMake.extra_options(extra_vars) - - def __init__(self, *args, **kwargs): - """Easyblock constructor.""" - - super(EB_Hypre, self).__init__(*args, **kwargs) - - self.config_shared = False - self.config_static = False - - def configure_step(self): - """Configure Hypre build after setting extra configure options.""" - - if '--enable-shared' in self.cfg['configopts']: - self.config_shared = True - ext_libs = 'LIB%s' - else: - self.config_static = True - ext_libs = '%s_STATIC_LIBS' - - # Use BLAS/LAPACK from EB - for dep in ["BLAS", "LAPACK"]: - blas_libs = ' '.join(os.getenv(ext_libs % dep).split(',')) - # Remove any '-l' as those are prepended for shared builds - blas_libs = blas_libs.replace('-l', '') - self.cfg.update('configopts', '--with-%s-libs="%s"' % - (dep.lower(), blas_libs)) - self.cfg.update('configopts', '--with-%s-lib-dirs="%s"' % (dep.lower(), - os.getenv('%s_LIB_DIR' % dep))) - - # Use MPI implementation from EB - self.cfg.update('configopts', '--with-MPI-include=%s' % - os.getenv('MPI_INC_DIR')) - - if self.cfg['withcuda']: - if get_software_root('CUDA'): - self.cfg.update('configopts', '--with-cuda') - - cuda_cc = build_option( - 'cuda_compute_capabilities') or self.cfg['cuda_compute_capabilities'] - if not cuda_cc: - raise EasyBuildError('List of CUDA compute capabilities must be specified, either via ' - 'cuda_compute_capabilities easyconfig parameter or via ' - '--cuda-compute-capabilities') - - cuda_cc_string = ' '.join( - [x.replace('.', '') for x in cuda_cc]) - self.cfg.update( - 'configopts', '--with-gpu-arch="%s"' % cuda_cc_string) - - super(EB_Hypre, self).configure_step() - - def sanity_check_step(self): - """Custom sanity check for Hypre.""" - - # Add static and shared libs depending on configopts - hypre_libs = list() - if self.config_shared: - shlib_ext = get_shared_lib_ext() - hypre_libs.append(os.path.join('lib', 'libHYPRE.%s' % shlib_ext)) - if self.config_static: - hypre_libs.append(os.path.join('lib', 'libHYPRE.a')) - - custom_paths = { - 'files': hypre_libs, - 'dirs': ['include'] - } - - super(EB_Hypre, self).sanity_check_step(custom_paths=custom_paths) diff --git a/Custom_EasyBlocks/imkl.py b/Custom_EasyBlocks/imkl.py deleted file mode 100644 index 9d9196990e2a27f606cdbf919dfdb0f1a2b5e748..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/imkl.py +++ /dev/null @@ -1,500 +0,0 @@ -# # -# Copyright 2009-2021 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) 
(http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -# # -""" -EasyBuild support for installing the Intel Math Kernel Library (MKL), implemented as an easyblock - -@author: Stijn De Weirdt (Ghent University) -@author: Dries Verdegem (Ghent University) -@author: Kenneth Hoste (Ghent University) -@author: Pieter De Baets (Ghent University) -@author: Jens Timmerman (Ghent University) -@author: Ward Poelmans (Ghent University) -@author: Lumir Jasiok (IT4Innovations) -""" -import glob -import itertools -import os -import shutil -import tempfile -from distutils.version import LooseVersion - -import easybuild.tools.environment as env -import easybuild.tools.toolchain as toolchain -from easybuild.easyblocks.generic.intelbase import IntelBase, ACTIVATION_NAME_2012, LICENSE_FILE_NAME_2012 -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import apply_regex_substitutions, change_dir, remove_dir, write_file -from easybuild.tools.modules import get_software_root -from easybuild.tools.run import run_cmd -from easybuild.tools.systemtools import get_shared_lib_ext - - -class EB_imkl(IntelBase): - """ - Class that can be used to install mkl - - tested with 10.2.1.017 - -- will fail for all older versions (due to newer silent installer) - """ - - @staticmethod - def extra_options(): - """Add easyconfig parameters custom to imkl (e.g. 
interfaces).""" - extra_vars = { - 'interfaces': [True, "Indicates whether interfaces should be built", CUSTOM], - } - return IntelBase.extra_options(extra_vars) - - def __init__(self, *args, **kwargs): - """Constructor for imkl easyblock.""" - super(EB_imkl, self).__init__(*args, **kwargs) - - # make sure $MKLROOT isn't set, it's known to cause problems with the installation - self.cfg.update('unwanted_env_vars', ['MKLROOT']) - self.cdftlibs = [] - self.mpi_spec = None - - if LooseVersion(self.version) >= LooseVersion('2021'): - self.mkl_basedir = os.path.join('mkl', self.version) - else: - self.mkl_basedir = 'mkl' - - def prepare_step(self, *args, **kwargs): - """Prepare build environment.""" - - if LooseVersion(self.version) >= LooseVersion('2017.2.174'): - kwargs['requires_runtime_license'] = False - super(EB_imkl, self).prepare_step(*args, **kwargs) - else: - super(EB_imkl, self).prepare_step(*args, **kwargs) - - # build the mkl interfaces, if desired - if self.cfg['interfaces']: - self.cdftlibs = ['fftw2x_cdft'] - if LooseVersion(self.version) >= LooseVersion('10.3'): - self.cdftlibs.append('fftw3x_cdft') - # check whether MPI_FAMILY constant is defined, so mpi_family() can be used - if hasattr(self.toolchain, 'MPI_FAMILY') and self.toolchain.MPI_FAMILY is not None: - mpi_spec_by_fam = { - toolchain.MPICH: 'mpich2', # MPICH is MPICH v3.x, which is MPICH2 compatible - toolchain.MPICH2: 'mpich2', - toolchain.MVAPICH2: 'mpich2', - toolchain.OPENMPI: 'openmpi', - } - mpi_fam = self.toolchain.mpi_family() - self.mpi_spec = mpi_spec_by_fam.get(mpi_fam) - debugstr = "MPI toolchain component" - else: - # can't use toolchain.mpi_family, because of system toolchain - if get_software_root('MPICH2') or get_software_root('MVAPICH2'): - self.mpi_spec = 'mpich2' - elif get_software_root('OpenMPI'): - self.mpi_spec = 'openmpi' - elif not get_software_root('impi'): - # no compatible MPI found: do not build cdft - self.cdftlibs = [] - debugstr = "loaded MPI module" - if self.mpi_spec: - self.log.debug("Determined MPI specification based on %s: %s", debugstr, self.mpi_spec) - else: - self.log.debug("No MPI or no compatible MPI found: do not build CDFT") - - def install_step(self): - """ - Actual installation - - create silent cfg file - - execute command - """ - silent_cfg_names_map = None - silent_cfg_extras = None - - if LooseVersion(self.version) < LooseVersion('11.1'): - # since imkl v11.1, silent.cfg has been slightly changed to be 'more standard' - - silent_cfg_names_map = { - 'activation_name': ACTIVATION_NAME_2012, - 'license_file_name': LICENSE_FILE_NAME_2012, - } - - if LooseVersion(self.version) >= LooseVersion('11.1') and self.install_components is None: - silent_cfg_extras = { - 'COMPONENTS': 'ALL', - } - - super(EB_imkl, self).install_step( - silent_cfg_names_map=silent_cfg_names_map, - silent_cfg_extras=silent_cfg_extras) - - def make_module_req_guess(self): - """ - A dictionary of possible directories to look for - """ - guesses = super(EB_imkl, self).make_module_req_guess() - - if LooseVersion(self.version) >= LooseVersion('10.3'): - if self.cfg['m32']: - raise EasyBuildError("32-bit not supported yet for IMKL v%s (>= 10.3)", self.version) - else: - if LooseVersion(self.version) >= LooseVersion('2021'): - compiler_subdir = os.path.join('compiler', self.version, 'linux', 'compiler', 'lib', 'intel64_lin') - pkg_config_path = [os.path.join(self.mkl_basedir, 'tools', 'pkgconfig')] - else: - compiler_subdir = os.path.join('lib', 'intel64') - pkg_config_path = [os.path.join(self.mkl_basedir, 
'bin', 'pkgconfig')] - guesses['MANPATH'] = ['man', os.path.join('man', 'en_US')] - if LooseVersion(self.version) >= LooseVersion('11.0'): - if LooseVersion(self.version) >= LooseVersion('11.3'): - guesses['MIC_LD_LIBRARY_PATH'] = [ - os.path.join('lib', 'intel64_lin_mic'), - os.path.join(self.mkl_basedir, 'lib', 'mic'), - ] - elif LooseVersion(self.version) >= LooseVersion('11.1'): - guesses['MIC_LD_LIBRARY_PATH'] = [ - os.path.join('lib', 'mic'), - os.path.join(self.mkl_basedir, 'lib', 'mic'), - ] - else: - guesses['MIC_LD_LIBRARY_PATH'] = [ - os.path.join('compiler', 'lib', 'mic'), - os.path.join(self.mkl_basedir, 'lib', 'mic'), - ] - library_path = [ - compiler_subdir, - os.path.join(self.mkl_basedir, 'lib', 'intel64'), - ] - cpath = [ - os.path.join(self.mkl_basedir, 'include'), - os.path.join(self.mkl_basedir, 'include', 'fftw'), - ] - guesses.update({ - 'PATH': [], - 'LD_LIBRARY_PATH': library_path, - 'LIBRARY_PATH': library_path, - 'CPATH': cpath, - 'PKG_CONFIG_PATH': pkg_config_path, - }) - else: - if self.cfg['m32']: - guesses.update({ - 'PATH': ['bin', 'bin/ia32', 'tbb/bin/ia32'], - 'LD_LIBRARY_PATH': ['lib', 'lib/32'], - 'LIBRARY_PATH': ['lib', 'lib/32'], - 'MANPATH': ['man', 'share/man', 'man/en_US'], - }) - - else: - guesses.update({ - 'PATH': ['bin', 'bin/intel64', 'tbb/bin/em64t'], - 'LD_LIBRARY_PATH': ['lib', 'lib/em64t'], - 'LIBRARY_PATH': ['lib', 'lib/em64t'], - 'MANPATH': ['man', 'share/man', 'man/en_US'], - }) - return guesses - - def make_module_extra(self): - """Overwritten from Application to add extra txt""" - - if 'MKL_EXAMPLES' not in self.cfg['modextravars']: - self.cfg.update('modextravars', { - 'MKL_EXAMPLES': os.path.join(self.installdir, self.mkl_basedir, 'examples'), - }) - - txt = super(EB_imkl, self).make_module_extra() - - if LooseVersion(self.version) >= LooseVersion('2021'): - mklroot = os.path.join(self.installdir, 'mkl', self.version) - else: - mklroot = os.path.join(self.installdir, 'mkl') - - txt += self.module_generator.set_environment('MKLROOT', mklroot) - return txt - - def post_install_step(self): - """ - Install group libraries and interfaces (if desired). - """ - super(EB_imkl, self).post_install_step() - - # extract examples - examples_subdir = os.path.join(self.installdir, self.mkl_basedir, 'examples') - if os.path.exists(examples_subdir): - cwd = change_dir(examples_subdir) - for examples_tarball in glob.glob('examples_*.tgz'): - run_cmd("tar xvzf %s -C ." 
% examples_tarball) - change_dir(cwd) - - # reload the dependencies - self.load_dependency_modules() - - shlib_ext = get_shared_lib_ext() - - if self.cfg['m32']: - extra = { - 'libmkl.%s' % shlib_ext: 'GROUP (-lmkl_intel -lmkl_intel_thread -lmkl_core)', - 'libmkl_em64t.a': 'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)', - 'libmkl_solver.a': 'GROUP (libmkl_solver.a)', - 'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_core.a)', - 'libmkl_lapack.a': 'GROUP (libmkl_intel.a libmkl_intel_thread.a libmkl_core.a)', - 'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)' - } - else: - extra = { - 'libmkl.%s' % shlib_ext: 'GROUP (-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core)', - 'libmkl_em64t.a': 'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)', - 'libmkl_solver.a': 'GROUP (libmkl_solver_lp64.a)', - 'libmkl_scalapack.a': 'GROUP (libmkl_scalapack_lp64.a)', - 'libmkl_lapack.a': 'GROUP (libmkl_intel_lp64.a libmkl_intel_thread.a libmkl_core.a)', - 'libmkl_cdft.a': 'GROUP (libmkl_cdft_core.a)' - } - - loosever = LooseVersion(self.version) - - if loosever >= LooseVersion('10.3'): - libsubdir = os.path.join(self.mkl_basedir, 'lib', 'intel64') - else: - if self.cfg['m32']: - libsubdir = os.path.join('lib', '32') - else: - libsubdir = os.path.join('lib', 'em64t') - - for fil, txt in extra.items(): - dest = os.path.join(self.installdir, libsubdir, fil) - if not os.path.exists(dest): - write_file(dest, txt) - - # build the mkl interfaces, if desired - if self.cfg['interfaces']: - - if loosever >= LooseVersion('10.3'): - intsubdir = os.path.join(self.mkl_basedir, 'interfaces') - inttarget = 'libintel64' - else: - intsubdir = 'interfaces' - if self.cfg['m32']: - inttarget = 'lib32' - else: - inttarget = 'libem64t' - - cmd = "make -f makefile %s" % inttarget - - # blas95 and lapack95 need more work, ignore for now - # blas95 and lapack also need include/.mod to be processed - fftw2libs = ['fftw2xc', 'fftw2xf'] - fftw3libs = ['fftw3xc', 'fftw3xf'] - - interfacedir = os.path.join(self.installdir, intsubdir) - change_dir(interfacedir) - self.log.info("Changed to interfaces directory %s", interfacedir) - - compopt = None - # determine whether we're using a non-Intel GCC-based or PGI-based toolchain - # can't use toolchain.comp_family, because of system toolchain used when installing imkl - if get_software_root('icc') or get_software_root('intel-compilers'): - compopt = 'compiler=intel' - else: - # check for PGI first, since there's a GCC underneath PGI too... 
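-                # (the resulting compopt value is passed as the 'compiler=...' option to the interface makefiles below)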
- if get_software_root('PGI'): - compopt = 'compiler=pgi' - elif get_software_root('GCC'): - compopt = 'compiler=gnu' - else: - raise EasyBuildError("Not using Intel/GCC/PGI compilers, don't know how to build wrapper libs") - - # patch makefiles for cdft wrappers when PGI is used as compiler - if get_software_root('PGI'): - regex_subs = [ - # pgi should be considered as a valid compiler - ("intel gnu", "intel gnu pgi"), - # transform 'gnu' case to 'pgi' case - (r"ifeq \(\$\(compiler\),gnu\)", "ifeq ($(compiler),pgi)"), - ('=gcc', '=pgcc'), - # correct flag to use C99 standard - ('-std=c99', '-c99'), - # -Wall and -Werror are not valid options for pgcc, no close equivalent - ('-Wall', ''), - ('-Werror', ''), - ] - for lib in self.cdftlibs: - apply_regex_substitutions(os.path.join(interfacedir, lib, 'makefile'), regex_subs) - - for lib in fftw2libs + fftw3libs + self.cdftlibs: - buildopts = [compopt] - if lib in fftw3libs: - buildopts.append('install_to=$INSTALL_DIR') - elif lib in self.cdftlibs: - if self.mpi_spec is not None: - buildopts.append('mpi=%s' % self.mpi_spec) - - precflags = [''] - if lib.startswith('fftw2x') and not self.cfg['m32']: - # build both single and double precision variants - precflags = ['PRECISION=MKL_DOUBLE', 'PRECISION=MKL_SINGLE'] - - intflags = [''] - if lib in self.cdftlibs and not self.cfg['m32']: - # build both 32-bit and 64-bit interfaces - intflags = ['interface=lp64', 'interface=ilp64'] - - allopts = [list(opts) for opts in itertools.product(intflags, precflags)] - - for flags, extraopts in itertools.product(['', '-fPIC'], allopts): - tup = (lib, flags, buildopts, extraopts) - self.log.debug("Building lib %s with: flags %s, buildopts %s, extraopts %s" % tup) - - tmpbuild = tempfile.mkdtemp(dir=self.builddir) - self.log.debug("Created temporary directory %s" % tmpbuild) - - # always set INSTALL_DIR, SPEC_OPT, COPTS and CFLAGS - # fftw2x(c|f): use $INSTALL_DIR, $CFLAGS and $COPTS - # fftw3x(c|f): use $CFLAGS - # fftw*cdft: use $INSTALL_DIR and $SPEC_OPT - env.setvar('INSTALL_DIR', tmpbuild) - env.setvar('SPEC_OPT', flags) - env.setvar('COPTS', flags) - env.setvar('CFLAGS', flags) - - try: - intdir = os.path.join(interfacedir, lib) - os.chdir(intdir) - self.log.info("Changed to interface %s directory %s" % (lib, intdir)) - except OSError as err: - raise EasyBuildError("Can't change to interface %s directory %s: %s", lib, intdir, err) - - fullcmd = "%s %s" % (cmd, ' '.join(buildopts + extraopts)) - res = run_cmd(fullcmd, log_all=True, simple=True) - if not res: - raise EasyBuildError("Building %s (flags: %s, fullcmd: %s) failed", lib, flags, fullcmd) - - for fn in os.listdir(tmpbuild): - src = os.path.join(tmpbuild, fn) - if flags == '-fPIC': - # add _pic to filename - ff = fn.split('.') - fn = '.'.join(ff[:-1]) + '_pic.' + ff[-1] - dest = os.path.join(self.installdir, libsubdir, fn) - try: - if os.path.isfile(src): - shutil.move(src, dest) - self.log.info("Moved %s to %s" % (src, dest)) - except OSError as err: - raise EasyBuildError("Failed to move %s to %s: %s", src, dest, err) - - remove_dir(tmpbuild) - - def sanity_check_step(self): - """Custom sanity check paths for Intel MKL.""" - shlib_ext = get_shared_lib_ext() - - mklfiles = None - mkldirs = None - ver = LooseVersion(self.version) - libs = ['libmkl_core.%s' % shlib_ext, 'libmkl_gnu_thread.%s' % shlib_ext, - 'libmkl_intel_thread.%s' % shlib_ext, 'libmkl_sequential.%s' % shlib_ext] - extralibs = ['libmkl_blacs_intelmpi_%(suff)s.' + shlib_ext, 'libmkl_scalapack_%(suff)s.' 
+ shlib_ext] - - if self.cfg['interfaces']: - if get_software_root('icc') or get_software_root('intel-compilers'): - compsuff = '_intel' - # check for PGI first, since there's a GCC underneath PGI too... - elif get_software_root('PGI'): - compsuff = '_pgi' - elif get_software_root('GCC'): - compsuff = '_gnu' - else: - raise EasyBuildError("Not using Intel/GCC/PGI, don't know compiler suffix for FFTW libraries.") - - precs = ['_double', '_single'] - if ver < LooseVersion('11'): - # no precision suffix in libfftw2 libs before imkl v11 - precs = [''] - fftw_vers = ['2x%s%s' % (x, prec) for x in ['c', 'f'] for prec in precs] + ['3xc', '3xf'] - pics = ['', '_pic'] - libs += ['libfftw%s%s%s.a' % (fftwver, compsuff, pic) for fftwver in fftw_vers for pic in pics] - - if self.cdftlibs: - fftw_cdft_vers = ['2x_cdft_DOUBLE'] - if not self.cfg['m32']: - fftw_cdft_vers.append('2x_cdft_SINGLE') - if ver >= LooseVersion('10.3'): - fftw_cdft_vers.append('3x_cdft') - if ver >= LooseVersion('11.0.2'): - bits = ['_lp64'] - if not self.cfg['m32']: - bits.append('_ilp64') - else: - # no bits suffix in cdft libs before imkl v11.0.2 - bits = [''] - libs += ['libfftw%s%s%s.a' % x for x in itertools.product(fftw_cdft_vers, bits, pics)] - - if ver >= LooseVersion('10.3') and self.cfg['m32']: - raise EasyBuildError("Sanity check for 32-bit not implemented yet for IMKL v%s (>= 10.3)", self.version) - - if ver >= LooseVersion('10.3'): - mkldirs = [ - os.path.join(self.mkl_basedir, 'bin'), - os.path.join(self.mkl_basedir, 'lib', 'intel64'), - os.path.join(self.mkl_basedir, 'include'), - ] - libs += [lib % {'suff': suff} for lib in extralibs for suff in ['lp64', 'ilp64']] - - mklfiles = [os.path.join(self.mkl_basedir, 'include', 'mkl.h')] - mklfiles.extend([os.path.join(self.mkl_basedir, 'lib', 'intel64', lib) for lib in libs]) - - if ver >= LooseVersion('2021'): - - mklfiles.append(os.path.join(self.mkl_basedir, 'lib', 'intel64', 'libmkl_core.%s' % shlib_ext)) - - elif ver >= LooseVersion('10.3'): - if ver < LooseVersion('11.3'): - mkldirs.append(os.path.join(self.mkl_basedir, 'bin', 'intel64')) - - mklfiles.append(os.path.join(self.mkl_basedir, 'lib', 'intel64', 'libmkl.%s' % shlib_ext)) - - if ver >= LooseVersion('10.3.4') and ver < LooseVersion('11.1'): - mkldirs += [os.path.join('compiler', 'lib', 'intel64')] - elif ver >= LooseVersion('2017.0.0'): - mkldirs += [os.path.join('lib', 'intel64_lin')] - else: - mkldirs += [os.path.join('lib', 'intel64')] - - else: - if self.cfg['m32']: - lib_subdir = '32' - else: - lib_subdir = 'em64t' - libs += [lib % {'suff': suff} for lib in extralibs for suff in ['lp64', 'ilp64']] - - mklfiles = [ - os.path.join('lib', lib_subdir, 'libmkl.%s' % shlib_ext), - os.path.join('include', 'mkl.h'), - ] - mklfiles.extend([os.path.join('lib', lib_subdir, lib) for lib in libs]) - mkldirs = [os.path.join('lib', lib_subdir), os.path.join('include', lib_subdir), 'interfaces'] - - custom_paths = { - 'files': mklfiles, - 'dirs': mkldirs, - } - - super(EB_imkl, self).sanity_check_step(custom_paths=custom_paths) diff --git a/Custom_EasyBlocks/impi.py b/Custom_EasyBlocks/impi.py deleted file mode 100644 index 54d7520ebaa850df8b4263439afb2cc2b5d2be22..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/impi.py +++ /dev/null @@ -1,376 +0,0 @@ -# # -# Copyright 2009-2021 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish 
Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -# # -""" -EasyBuild support for installing the Intel MPI library, implemented as an easyblock - -@author: Stijn De Weirdt (Ghent University) -@author: Dries Verdegem (Ghent University) -@author: Kenneth Hoste (Ghent University) -@author: Pieter De Baets (Ghent University) -@author: Jens Timmerman (Ghent University) -@author: Damian Alvarez (Forschungszentrum Juelich GmbH) -@author: Alex Domingo (Vrije Universiteit Brussel) -""" -import os -from distutils.version import LooseVersion - -import easybuild.tools.toolchain as toolchain -from easybuild.easyblocks.generic.intelbase import IntelBase, ACTIVATION_NAME_2012, LICENSE_FILE_NAME_2012 -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.config import build_option -from easybuild.tools.filetools import apply_regex_substitutions, change_dir, extract_file, mkdir, write_file -from easybuild.tools.modules import get_software_root -from easybuild.tools.run import run_cmd -from easybuild.tools.systemtools import get_shared_lib_ext -from easybuild.tools.toolchain.mpi import get_mpi_cmd_template - - -class EB_impi(IntelBase): - """ - Support for installing Intel MPI library - """ - @staticmethod - def extra_options(): - extra_vars = { - 'libfabric_configopts': ['', 'Configure options for the provided libfabric', CUSTOM], - 'libfabric_rebuild': [True, 'Try to rebuild internal libfabric instead of using provided binary', CUSTOM], - 'ofi_internal': [True, 'Use internal shipped libfabric instead of external libfabric', CUSTOM], - 'set_mpi_wrappers_compiler': [False, 'Override default compiler used by MPI wrapper commands', CUSTOM], - 'set_mpi_wrapper_aliases_gcc': [False, 'Set compiler for mpigcc/mpigxx via aliases', CUSTOM], - 'set_mpi_wrapper_aliases_intel': [False, 'Set compiler for mpiicc/mpiicpc/mpiifort via aliases', CUSTOM], - 'set_mpi_wrappers_all': [False, 'Set (default) compiler for all MPI wrapper commands', CUSTOM], - } - return IntelBase.extra_options(extra_vars) - - def prepare_step(self, *args, **kwargs): - if LooseVersion(self.version) >= LooseVersion('2017.2.174'): - kwargs['requires_runtime_license'] = False - super(EB_impi, self).prepare_step(*args, **kwargs) - else: - super(EB_impi, self).prepare_step(*args, **kwargs) - - def install_step(self): - """ - Actual installation - - create silent cfg file - - execute command - """ - impiver = LooseVersion(self.version) - - if impiver >= LooseVersion('2021'): - super(EB_impi, self).install_step() - - elif impiver >= LooseVersion('4.0.1'): - # impi starting from version 4.0.1.x uses standard installation procedure. 
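-            # (versions below 4.1.1 still use the older silent.cfg key names, mapped right below)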
- - silent_cfg_names_map = {} - - if impiver < LooseVersion('4.1.1'): - # since impi v4.1.1, silent.cfg has been slightly changed to be 'more standard' - silent_cfg_names_map.update({ - 'activation_name': ACTIVATION_NAME_2012, - 'license_file_name': LICENSE_FILE_NAME_2012, - }) - - super(EB_impi, self).install_step(silent_cfg_names_map=silent_cfg_names_map) - - # impi v4.1.1 and v5.0.1 installers create impi/<version> subdir, so stuff needs to be moved afterwards - if impiver == LooseVersion('4.1.1.036') or impiver >= LooseVersion('5.0.1.035'): - super(EB_impi, self).move_after_install() - else: - # impi up until version 4.0.0.x uses custom installation procedure. - silent = """[mpi] -INSTALLDIR=%(ins)s -LICENSEPATH=%(lic)s -INSTALLMODE=NONRPM -INSTALLUSER=NONROOT -UPDATE_LD_SO_CONF=NO -PROCEED_WITHOUT_PYTHON=yes -AUTOMOUNTED_CLUSTER=yes -EULA=accept -[mpi-rt] -INSTALLDIR=%(ins)s -LICENSEPATH=%(lic)s -INSTALLMODE=NONRPM -INSTALLUSER=NONROOT -UPDATE_LD_SO_CONF=NO -PROCEED_WITHOUT_PYTHON=yes -AUTOMOUNTED_CLUSTER=yes -EULA=accept - -""" % {'lic': self.license_file, 'ins': self.installdir} - - # already in correct directory - silentcfg = os.path.join(os.getcwd(), "silent.cfg") - write_file(silentcfg, silent) - self.log.debug("Contents of %s: %s", silentcfg, silent) - - tmpdir = os.path.join(os.getcwd(), self.version, 'mytmpdir') - mkdir(tmpdir, parents=True) - - cmd = "./install.sh --tmp-dir=%s --silent=%s" % (tmpdir, silentcfg) - run_cmd(cmd, log_all=True, simple=True) - - # recompile libfabric (if requested) - # some Intel MPI versions (like 2019 update 6) no longer ship libfabric sources - libfabric_path = os.path.join(self.installdir, 'libfabric') - if impiver >= LooseVersion('2019') and self.cfg['libfabric_rebuild']: - if self.cfg['ofi_internal']: - libfabric_src_tgz_fn = 'src.tgz' - if os.path.exists(os.path.join(libfabric_path, libfabric_src_tgz_fn)): - change_dir(libfabric_path) - srcdir = extract_file(libfabric_src_tgz_fn, os.getcwd(), change_into_dir=False) - change_dir(srcdir) - libfabric_installpath = os.path.join(self.installdir, 'intel64', 'libfabric') - - make = 'make' - if self.cfg['parallel']: - make += ' -j %d' % self.cfg['parallel'] - - cmds = [ - './configure --prefix=%s %s' % (libfabric_installpath, self.cfg['libfabric_configopts']), - make, - 'make install' - ] - for cmd in cmds: - run_cmd(cmd, log_all=True, simple=True) - else: - self.log.info("Rebuild of libfabric is requested, but %s does not exist, so skipping...", - libfabric_src_tgz_fn) - else: - raise EasyBuildError("Rebuild of libfabric is requested, but ofi_internal is set to False.") - - def post_install_step(self): - """Custom post install step for IMPI, fix broken env scripts after moving installed files.""" - super(EB_impi, self).post_install_step() - - impiver = LooseVersion(self.version) - - if impiver >= LooseVersion('2021'): - self.log.info("No post-install action for impi v%s", self.version) - - elif impiver == LooseVersion('4.1.1.036') or impiver >= LooseVersion('5.0.1.035'): - if impiver >= LooseVersion('2018.0.128'): - script_paths = [os.path.join('intel64', 'bin')] - else: - script_paths = [os.path.join('intel64', 'bin'), os.path.join('mic', 'bin')] - # fix broken env scripts after the move - regex_subs = [(r"^setenv I_MPI_ROOT.*", r"setenv I_MPI_ROOT %s" % self.installdir)] - for script in [os.path.join(script_path, 'mpivars.csh') for script_path in script_paths]: - apply_regex_substitutions(os.path.join(self.installdir, script), regex_subs) - regex_subs = [(r"^(\s*)I_MPI_ROOT=[^;\n]*", 
r"\1I_MPI_ROOT=%s" % self.installdir)] - for script in [os.path.join(script_path, 'mpivars.sh') for script_path in script_paths]: - apply_regex_substitutions(os.path.join(self.installdir, script), regex_subs) - - # fix 'prefix=' in compiler wrapper scripts after moving installation (see install_step) - wrappers = ['mpif77', 'mpif90', 'mpigcc', 'mpigxx', 'mpiicc', 'mpiicpc', 'mpiifort'] - regex_subs = [(r"^prefix=.*", r"prefix=%s" % self.installdir)] - for script_dir in script_paths: - for wrapper in wrappers: - wrapper_path = os.path.join(self.installdir, script_dir, wrapper) - if os.path.exists(wrapper_path): - apply_regex_substitutions(wrapper_path, regex_subs) - - def sanity_check_step(self): - """Custom sanity check paths for IMPI.""" - - impi_ver = LooseVersion(self.version) - - suff = '64' - if self.cfg['m32']: - suff = '' - - mpi_mods = ['mpi.mod'] - if impi_ver > LooseVersion('4.0'): - mpi_mods.extend(['mpi_base.mod', 'mpi_constants.mod', 'mpi_sizeofs.mod']) - - if impi_ver >= LooseVersion('2021'): - mpi_subdir = os.path.join('mpi', self.version) - bin_dir = os.path.join(mpi_subdir, 'bin') - include_dir = os.path.join(mpi_subdir, 'include') - lib_dir = os.path.join(mpi_subdir, 'lib', 'release') - - elif impi_ver >= LooseVersion('2019'): - bin_dir = os.path.join('intel64', 'bin') - include_dir = os.path.join('intel64', 'include') - lib_dir = os.path.join('intel64', 'lib', 'release') - else: - bin_dir = 'bin%s' % suff - include_dir = 'include%s' % suff - lib_dir = 'lib%s' % suff - mpi_mods.extend(['i_malloc.h']) - - shlib_ext = get_shared_lib_ext() - custom_paths = { - 'files': [os.path.join(bin_dir, 'mpi%s' % x) for x in ['icc', 'icpc', 'ifort']] + - [os.path.join(include_dir, 'mpi%s.h' % x) for x in ['cxx', 'f', '', 'o', 'of']] + - [os.path.join(include_dir, x) for x in mpi_mods] + - [os.path.join(lib_dir, 'libmpi.%s' % shlib_ext)] + - [os.path.join(lib_dir, 'libmpi.a')], - 'dirs': [], - } - - custom_commands = [] - - if build_option('mpi_tests'): - if impi_ver >= LooseVersion('2017'): - # Add minimal test program to sanity checks - if impi_ver >= LooseVersion('2021'): - impi_testsrc = os.path.join(self.installdir, 'mpi', self.version, 'test', 'test.c') - else: - impi_testsrc = os.path.join(self.installdir, 'test', 'test.c') - - impi_testexe = os.path.join(self.builddir, 'mpi_test') - self.log.info("Adding minimal MPI test program to sanity checks: %s", impi_testsrc) - - # Build test program with appropriate compiler from current toolchain - comp_fam = self.toolchain.comp_family() - if comp_fam == toolchain.INTELCOMP: - build_comp = 'mpiicc' - else: - build_comp = 'mpicc' - build_cmd = "%s %s -o %s" % (build_comp, impi_testsrc, impi_testexe) - - # Execute test program with appropriate MPI executable for target toolchain - params = {'nr_ranks': self.cfg['parallel'], 'cmd': impi_testexe} - mpi_cmd_tmpl, params = get_mpi_cmd_template(toolchain.INTELMPI, params, mpi_version=self.version) - - custom_commands.extend([ - build_cmd, # build test program - mpi_cmd_tmpl % params, # run test program - ]) - - super(EB_impi, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands) - - def make_module_req_guess(self): - """ - A dictionary of possible directories to look for - """ - guesses = super(EB_impi, self).make_module_req_guess() - if self.cfg['m32']: - lib_dirs = ['lib', 'lib/ia32', 'ia32/lib'] - guesses.update({ - 'PATH': ['bin', 'bin/ia32', 'ia32/bin'], - 'LD_LIBRARY_PATH': lib_dirs, - 'LIBRARY_PATH': lib_dirs, - 'MIC_LD_LIBRARY_PATH': ['mic/lib'], - }) - 
else: - manpath = 'man' - - impi_ver = LooseVersion(self.version) - if impi_ver >= LooseVersion('2021'): - mpi_subdir = os.path.join('mpi', self.version) - lib_dirs = [ - os.path.join(mpi_subdir, 'lib'), - os.path.join(mpi_subdir, 'lib', 'release'), - os.path.join(mpi_subdir, 'libfabric', 'lib'), - ] - include_dirs = [os.path.join(mpi_subdir, 'include')] - path_dirs = [ - os.path.join(mpi_subdir, 'bin'), - os.path.join(mpi_subdir, 'libfabric', 'bin'), - ] - manpath = os.path.join(mpi_subdir, 'man') - - if self.cfg['ofi_internal']: - libfabric_dir = os.path.join('mpi', self.version, 'libfabric') - lib_dirs.append(os.path.join(libfabric_dir, 'lib')) - path_dirs.append(os.path.join(libfabric_dir, 'bin')) - guesses['FI_PROVIDER_PATH'] = [os.path.join(libfabric_dir, 'lib', 'prov')] - - elif impi_ver >= LooseVersion('2019'): - # The "release" library is default in v2019. Give it precedence over intel64/lib. - # (remember paths are *prepended*, so the last path in the list has highest priority) - lib_dirs = [os.path.join('intel64', x) for x in ['lib', os.path.join('lib', 'release')]] - include_dirs = [os.path.join('intel64', 'include')] - path_dirs = [os.path.join('intel64', 'bin')] - if self.cfg['ofi_internal']: - lib_dirs.append(os.path.join('intel64', 'libfabric', 'lib')) - path_dirs.append(os.path.join('intel64', 'libfabric', 'bin')) - guesses['FI_PROVIDER_PATH'] = [os.path.join('intel64', 'libfabric', 'lib', 'prov')] - else: - lib_dirs = [os.path.join('lib', 'em64t'), 'lib64'] - include_dirs = ['include64'] - path_dirs = [os.path.join('bin', 'intel64'), 'bin64'] - guesses['MIC_LD_LIBRARY_PATH'] = [os.path.join('mic', 'lib')] - - guesses.update({ - 'PATH': path_dirs, - 'LD_LIBRARY_PATH': lib_dirs, - 'LIBRARY_PATH': lib_dirs, - 'MANPATH': [manpath], - 'CPATH': include_dirs, - }) - - return guesses - - def make_module_extra(self, *args, **kwargs): - """Overwritten from Application to add extra txt""" - txt = super(EB_impi, self).make_module_extra(*args, **kwargs) - impiver = LooseVersion(self.version) - if impiver >= LooseVersion('2021'): - i_mpi_root = os.path.join(self.installdir, 'mpi', self.version) - else: - i_mpi_root = self.installdir - txt += self.module_generator.set_environment('I_MPI_ROOT', i_mpi_root) - if self.cfg['set_mpi_wrappers_compiler'] or self.cfg['set_mpi_wrappers_all']: - for var in ['CC', 'CXX', 'F77', 'F90', 'FC']: - if var == 'FC': - # $FC isn't defined by EasyBuild framework, so use $F90 instead - src_var = 'F90' - else: - src_var = var - - target_var = 'I_MPI_%s' % var - - val = os.getenv(src_var) - if val: - txt += self.module_generator.set_environment(target_var, val) - else: - raise EasyBuildError("Environment variable $%s not set, can't define $%s", src_var, target_var) - - if self.cfg['set_mpi_wrapper_aliases_gcc'] or self.cfg['set_mpi_wrappers_all']: - # force mpigcc/mpigxx to use GCC compilers, as would be expected based on their name - txt += self.module_generator.set_alias('mpigcc', 'mpigcc -cc=gcc') - txt += self.module_generator.set_alias('mpigxx', 'mpigxx -cxx=g++') - - if self.cfg['set_mpi_wrapper_aliases_intel'] or self.cfg['set_mpi_wrappers_all']: - # do the same for mpiicc/mpiipc/mpiifort to be consistent, even if they may not exist - txt += self.module_generator.set_alias('mpiicc', 'mpiicc -cc=icc') - txt += self.module_generator.set_alias('mpiicpc', 'mpiicpc -cxx=icpc') - # -fc also works, but -f90 takes precedence - txt += self.module_generator.set_alias('mpiifort', 'mpiifort -f90=ifort') - - # set environment variable UCX_TLS to 'all', this works 
in all hardware configurations - # needed with UCX regardless of the transports available (even without a Mellanox HCA) - # more information in easybuilders/easybuild-easyblocks#2253 - if get_software_root('UCX'): - # do not overwrite settings in the easyconfig - if 'UCX_TLS' not in self.cfg['modextravars']: - txt += self.module_generator.set_environment('UCX_TLS', 'all') - - return txt diff --git a/Custom_EasyBlocks/julia.py b/Custom_EasyBlocks/julia.py deleted file mode 100644 index 51f485136bce56c82533fffa4dbe9caa8a977fc8..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/julia.py +++ /dev/null @@ -1,236 +0,0 @@ -## -# Copyright 2009-2019 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for building and installing Julia packages, implemented as an easyblock - -@author: Victor Holanda (CSCS) -@author: Samuel Omlin (CSCS) -minor adjustments by Jens Henrik Goebbert (JSC) -""" -import os -import socket - -from easybuild.tools.config import build_option -from easybuild.framework.easyconfig import CUSTOM -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.tools import systemtools - - -class EB_Julia(ConfigureMake): - """ - Install an Julia package as a separate module, or as an extension. 
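-    The generated etc/julia/startup.jl injects the admin depot and load paths (EBJULIA_ADMIN_*) into DEPOT_PATH and LOAD_PATH at startup.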
- """ - @staticmethod - def extra_options(extra_vars=None): - extra_vars = { - 'system_name': [None, "Change julia's Project.toml pathname", CUSTOM], - 'arch_name': [None, "Change julia's Project.toml pathname", CUSTOM], - } - return ConfigureMake.extra_options(extra_vars) - - - def get_environment_folder(self): - env_path = '' - - systemname = 'default' - if self.cfg['system_name']: - systemname = self.cfg['system_name'] - - if self.cfg['arch_name']: - env_path = '-'.join([systemname, self.cfg['arch_name']]) - return env_path - - arch = systemtools.get_cpu_architecture() - cpu_family = systemtools.get_cpu_family() - env_path = '-'.join([systemname, cpu_family, arch]) - return env_path - - def get_user_depot_path(self): - user_depot_path = '' - - arch = systemtools.get_cpu_architecture() - cpu_family = systemtools.get_cpu_family() - user_depot_path = os.path.join('~', '.julia', self.version, self.get_environment_folder()) - return user_depot_path - - def __init__(self, *args, **kwargs): - super(EB_Julia, self).__init__(*args, **kwargs) - - self.user_depot = self.get_user_depot_path() - local_share_depot = os.path.join(self.installdir, 'local', 'share', 'julia') - share_depot = os.path.join(self.installdir, 'share', 'julia') - self.std_depots = ':'.join([local_share_depot, share_depot]) - self.julia_depot_path = ':'.join([self.user_depot, self.std_depots]) - self.admin_depots = os.path.join(self.installdir, 'extensions') - - self.julia_project = os.path.join(self.user_depot, "environments", '-'.join([self.version, self.get_environment_folder()])) - - self.user_load_path = '@:@#.#.#-%s' % self.get_environment_folder() - self.std_load_paths = '@stdlib' - self.julia_load_path = ':'.join([self.user_load_path, self.std_load_paths]) - self.admin_load_path = os.path.join(self.admin_depots, "environments", '-'.join([self.version, self.get_environment_folder()])) - - def sanity_check_step(self): - """Custom sanity check for Julia.""" - - custom_paths = { - 'files': [os.path.join('bin', 'julia'), 'LICENSE.md'], - 'dirs': ['bin', 'include', 'lib', 'share'], - } - custom_commands = [ - "julia --version", - "julia --eval '1+2'", - ] - - super(EB_Julia, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands) - - def install_step(self, *args, **kwargs): - """Install procedure for Julia""" - - super(EB_Julia, self).install_step(*args, **kwargs) - txt = """ -## Read EB environment variables - -if haskey(ENV, "EBJULIA_ADMIN_LOAD_PATH") - ADMIN_LOAD_PATH = split(ENV["EBJULIA_ADMIN_LOAD_PATH"],':') -else - ADMIN_LOAD_PATH = [] -end - -if haskey(ENV, "EBJULIA_STD_LOAD_PATH") - STD_LOAD_PATH = split(ENV["EBJULIA_STD_LOAD_PATH"],':') -else - STD_LOAD_PATH = [] -end - -if haskey(ENV, "EBJULIA_ADMIN_DEPOT_PATH") - ADMIN_DEPOT_PATH = split(ENV["EBJULIA_ADMIN_DEPOT_PATH"],':') -else - ADMIN_DEPOT_PATH = [] -end - -if haskey(ENV, "EBJULIA_STD_DEPOT_PATH") - STD_DEPOT_PATH = split(ENV["EBJULIA_STD_DEPOT_PATH"],':') -else - STD_DEPOT_PATH = [] -end - - -## Inject the admin paths, except if paths empty (or only "@" for LOAD_PATH) or all entries in std path. -if !( isempty(LOAD_PATH) || isempty(DEPOT_PATH) || (length(LOAD_PATH)==1 && LOAD_PATH[1]=="@") || - all([entry in STD_LOAD_PATH for entry in LOAD_PATH]) || all([entry in STD_DEPOT_PATH for entry in DEPOT_PATH]) ) - - ## Inject the admin load path into the LOAD_PATH - - # Empty the LOAD_PATH, separating load path into user and std load path. 
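-    # (LOAD_PATH is rebuilt below in the order: user entries, admin entries, std entries)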
- user_load_path = [] - std_load_path = [] - while !isempty(LOAD_PATH) - entry = popfirst!(LOAD_PATH) - if entry in STD_LOAD_PATH - push!(std_load_path, entry) - else - push!(user_load_path, entry) - end - end - - # Add user load path to LOAD_PATH - while !isempty(user_load_path) - entry = popfirst!(user_load_path) - push!(LOAD_PATH, entry) - end - - # Add admin load path to LOAD_PATH - while !isempty(ADMIN_LOAD_PATH) - entry = popfirst!(ADMIN_LOAD_PATH) - push!(LOAD_PATH, entry) - end - - # Add std load path to LOAD_PATH - while !isempty(std_load_path) - entry = popfirst!(std_load_path) - push!(LOAD_PATH, entry) - end - - - ## Inject the admin depot path into the DEPOT_PATH - - # Empty the DEPOT_PATH, separating depots into user and std depots. - user_depot_path = [] - std_depot_path = [] - while !isempty(DEPOT_PATH) - depot = popfirst!(DEPOT_PATH) - if depot in STD_DEPOT_PATH - push!(std_depot_path, depot) - else - push!(user_depot_path, depot) - end - end - - # Add user depots to DEPOT_PATH - while !isempty(user_depot_path) - depot = popfirst!(user_depot_path) - push!(DEPOT_PATH, depot) - end - - # Add admin depots to DEPOT_PATH - while !isempty(ADMIN_DEPOT_PATH) - depot = popfirst!(ADMIN_DEPOT_PATH) - push!(DEPOT_PATH, depot) - end - - # Add std depots to DEPOT_PATH - while !isempty(std_depot_path) - depot = popfirst!(std_depot_path) - push!(DEPOT_PATH, depot) - end - -end - - """ - with open(os.path.join(self.installdir, 'etc', 'julia', 'startup.jl'), 'w') as startup_file: - startup_file.write(txt) - startup_file.close() - - def make_module_extra(self, *args, **kwargs): - txt = super(EB_Julia, self).make_module_extra(*args, **kwargs) - - txt += self.module_generator.set_environment('JULIA_INSTALL_FOLDER', self.installdir) - - txt += self.module_generator.set_environment('JULIA_PROJECT', self.julia_project) - txt += self.module_generator.set_environment('JULIA_DEPOT_PATH', self.julia_depot_path) - txt += self.module_generator.set_environment('EBJULIA_USER_DEPOT_PATH', self.user_depot) - txt += self.module_generator.set_environment('EBJULIA_ADMIN_DEPOT_PATH', self.admin_depots) - txt += self.module_generator.set_environment('EBJULIA_STD_DEPOT_PATH', self.std_depots) - - - txt += self.module_generator.set_environment('JULIA_LOAD_PATH', self.julia_load_path) - txt += self.module_generator.set_environment('EBJULIA_USER_LOAD_PATH', self.user_load_path) - txt += self.module_generator.set_environment('EBJULIA_ADMIN_LOAD_PATH', self.admin_load_path) - txt += self.module_generator.set_environment('EBJULIA_STD_LOAD_PATH', self.std_load_paths) - - txt += self.module_generator.set_environment('EBJULIA_ENV_NAME', '-'.join([self.version, self.get_environment_folder()])) - - return txt diff --git a/Custom_EasyBlocks/juliabundle.py b/Custom_EasyBlocks/juliabundle.py deleted file mode 100644 index 6a6dc4352ec0a52e5a6139d1315768c1c12a6dbd..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/juliabundle.py +++ /dev/null @@ -1,116 +0,0 @@ -## -# Copyright 2009-2019 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). 
-# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for building and installing Julia packages, implemented as an easyblock - -@author: Victor Holanda (CSCS) -@author: Samuel Omlin (CSCS) -minor adjustments by Jens Henrik Goebbert (JSC) -""" -import os -import socket - -from easybuild.easyblocks.generic.bundle import Bundle -from easybuild.tools.config import build_option -from easybuild.tools import systemtools -#from easybuild.easyblocks.generic.pythonpackage import PythonPackage, det_pylibdir -from .juliapackage import JuliaPackage - - -class JuliaBundle(Bundle): - """ - Install an Julia package as a separate module, or as an extension. - """ - - @staticmethod - def extra_options(extra_vars=None): - """Easyconfig parameters specific to bundles of Python packages.""" - #50 extra_vars = { - #51 'arch_name': [None, "Change julia's Project.toml pathname", CUSTOM], - #52 } - if extra_vars is None: - extra_vars = {} - # combine custom easyconfig parameters of Bundle & JuliaPackage - extra_vars = Bundle.extra_options(extra_vars) - return JuliaPackage.extra_options(extra_vars) - - def get_environment_folder(self): - env_path = '' - - systemname = 'default' - if self.cfg['system_name']: - systemname = self.cfg['system_name'] - - if self.cfg['arch_name']: - env_path = '-'.join([systemname, self.cfg['arch_name']]) - return env_path - - arch = systemtools.get_cpu_architecture() - cpu_family = systemtools.get_cpu_family() - env_path = '-'.join([systemname, cpu_family, arch]) - return env_path - - def __init__(self, *args, **kwargs): - super(JuliaBundle, self).__init__(*args, **kwargs) - self.cfg['exts_defaultclass'] = 'JuliaPackage' - - # need to disable templating to ensure that actual value for exts_default_options is updated... 
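-        # (templating is switched back on below, once the JuliaPackage defaults have been copied into exts_default_options)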
- prev_enable_templating = self.cfg.enable_templating - self.cfg.enable_templating = False - - # set default options for extensions according to relevant top-level easyconfig parameters - julpkg_keys = JuliaPackage.extra_options().keys() - for key in julpkg_keys: - if key not in self.cfg['exts_default_options']: - self.cfg['exts_default_options'][key] = self.cfg[key] - - self.cfg['exts_default_options']['download_dep_fail'] = True - self.log.info("Detection of downloaded extension dependencies is enabled") - - self.cfg.enable_templating = prev_enable_templating - - self.log.info("exts_default_options: %s", self.cfg['exts_default_options']) - - self.extensions_depot = 'extensions' - - self.admin_load_path = os.path.join(self.extensions_depot, "environments", '-'.join([self.version, self.get_environment_folder()])) - - def sanity_check_step(self): - """Custom sanity check for Julia.""" - - custom_paths = { - 'files': [], - 'dirs': ['extensions'], - } - super(JuliaBundle, self).sanity_check_step(custom_paths=custom_paths) - - def make_module_extra(self, *args, **kwargs): - txt = super(JuliaBundle, self).make_module_extra(*args, **kwargs) - - txt += self.module_generator.prepend_paths('EBJULIA_ADMIN_DEPOT_PATH', self.extensions_depot) - - txt += self.module_generator.prepend_paths('EBJULIA_ADMIN_LOAD_PATH', self.admin_load_path) - - return txt diff --git a/Custom_EasyBlocks/juliapackage.py b/Custom_EasyBlocks/juliapackage.py deleted file mode 100644 index 58cf4cd4b285828e0e936c14f9c31a1806565a50..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/juliapackage.py +++ /dev/null @@ -1,153 +0,0 @@ -## -# Copyright 2009-2019 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for building and installing Julia packages, implemented as an easyblock - -@author: Victor Holanda (CSCS) -@author: Samuel Omlin (CSCS) -minor adjustments by Jens Henrik Goebbert (JSC) -""" -import os -import sys - -import easybuild.tools.toolchain as toolchain - -from easybuild.framework.easyconfig import CUSTOM -from easybuild.framework.extensioneasyblock import ExtensionEasyBlock -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.run import run_cmd, parse_log_for_error - - -class JuliaPackage(ExtensionEasyBlock): - """ - Install an Julia package as a separate module, or as an extension. 
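-    Packages are added via Pkg.add(PackageSpec(...)) into a dedicated depot under the module's 'extensions' subdirectory.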
- """ - - @staticmethod - def extra_options(extra_vars=None): - if extra_vars is None: - extra_vars = {} - - extra_vars.update({ - 'system_name': [None, "Change julia's Project.toml pathname", CUSTOM], - 'arch_name': [None, "Change julia's Project.toml pathname", CUSTOM], - 'packagespec': [None, "Overwrite install options for Pkg.add(PackageSpec(<packagespec>))", CUSTOM], - 'mpiexec': [None, "Set the mpiexec command", CUSTOM], - 'mpiexec_args': [None, "Set the mpiexec command args", CUSTOM], - 'mpi_path': [None, "Set the MPI installation path", CUSTOM], - 'mpicc': [None, "Set mpicc command", "mpicc"], - }) - return ExtensionEasyBlock.extra_options(extra_vars=extra_vars) - - def __init__(self, *args, **kwargs): - super(JuliaPackage, self).__init__(*args, **kwargs) - self.package_name = self.name - names = self.package_name.split('.') - if len(names) > 1: - self.package_name = ''.join(names[:-1]) - - julia_env_name = os.getenv('EBJULIA_ENV_NAME', '') - self.depot = os.path.join(self.installdir, 'extensions') - self.projectdir = os.path.join(self.depot, 'environments', julia_env_name) - self.log.info("Depot for package installations: %s" % self.depot) - - def patch_step(self, beginpath=None): - pass - - def fetch_sources(self, sources=None, checksums=None): - pass - - def extract_step(self): - """Source should not be extracted.""" - pass - - def configure_step(self): - """No configuration for installing Julia packages.""" - pass - - def build_step(self): - """No separate build step for Julia packages.""" - pass - - def make_julia_cmd(self, remove=False): - """Create a command to run in julia to install an julia package.""" - - if self.cfg['packagespec']: - package_spec = self.cfg['packagespec'] - else: - package_spec = "name=\"%s\", version=\"%s\"" % (self.package_name, self.version) - - pre_cmd = '%s unset EBJULIA_USER_DEPOT_PATH && unset EBJULIA_ADMIN_DEPOT_PATH && export JULIA_DEPOT_PATH=%s && export JULIA_PROJECT=%s' % (self.cfg['preinstallopts'], self.depot, self.projectdir) - - if self.cfg['mpi_path']: - pre_cmd += ' && export JULIA_MPI_BINARY=system' - pre_cmd += ' && export JULIA_MPI_PATH="%s"' % self.cfg['mpi_path'] - - if self.cfg['mpiexec']: - pre_cmd += ' && export JULIA_MPIEXEC="%s"' % self.cfg['mpiexec'] - - if self.cfg['mpiexec_args']: - pre_cmd += ' && export JULIA_MPIEXEC_ARGS="%s"' % self.cfg['mpiexec_args'] - - if self.cfg['mpicc']: - pre_cmd += ' && export JULIA_MPICC="%s"' % self.cfg['mpicc'] - - if self.cfg['arch_name'] == 'gpu': - pre_cmd += ' && export JULIA_CUDA_USE_BINARYBUILDER=false' - - if remove: - cmd = ' && '.join([pre_cmd, "julia --eval 'using Pkg; Pkg.rm(PackageSpec(%s))'" % package_spec]) - else: - cmd = ' && '.join([pre_cmd, "julia --eval 'using Pkg; Pkg.add(PackageSpec(%s))'" % package_spec]) - - return cmd - - def install_step(self): - """Install procedure for Julia packages.""" - - cmd = self.make_julia_cmd(remove=False) - cmdttdouterr, _ = run_cmd(cmd, log_all=True, simple=False, regexp=False) - - cmderrors = parse_log_for_error(cmdttdouterr, regExp="^ERROR:") - if cmderrors: - cmd = self.make_julia_cmd(remove=True) - run_cmd(cmd, log_all=False, log_ok=False, simple=False, inp=sys.stdin, regexp=False) - raise EasyBuildError("Errors detected during installation of Julia package %s!", self.name) - - self.log.info("Julia package %s installed succesfully" % self.name) - - def run(self): - """Install Julia package as an extension.""" - self.install_step() - - def sanity_check_step(self, *args, **kwargs): - """ - Custom sanity check for Julia packages - """ - 
# NOTE: we don't use Pkg.status with arguments, as that is only supported for Julia >= v1.1
-        cmd = "unset EBJULIA_USER_DEPOT_PATH && unset EBJULIA_ADMIN_DEPOT_PATH && export JULIA_DEPOT_PATH=%s && export JULIA_PROJECT=%s && julia --eval 'using Pkg; Pkg.status()'" % (self.depot, self.projectdir)
-        cmdttdouterr, _ = run_cmd(cmd, log_all=True, simple=False, regexp=False)
-        self.log.info("Julia package %s sanity check output: %s" % (self.name, cmdttdouterr))
-        return len(parse_log_for_error(cmdttdouterr, regExp=r"%s\s+v%s" % (self.package_name, self.version))) != 0
diff --git a/Custom_EasyBlocks/lammps.py b/Custom_EasyBlocks/lammps.py
deleted file mode 100644
index c058e04ec1f42d715caba41dceb0e4884ff938f2..0000000000000000000000000000000000000000
--- a/Custom_EasyBlocks/lammps.py
+++ /dev/null
@@ -1,437 +0,0 @@
-# -*- coding: utf-8 -*-
-##
-# Copyright 2009-2020 Ghent University
-#
-# This file is part of EasyBuild,
-# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
-# with support of Ghent University (http://ugent.be/hpc),
-# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
-# the Hercules foundation (http://www.herculesstichting.be/in_English)
-# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
-#
-# https://github.com/easybuilders/easybuild
-#
-# EasyBuild is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation v2.
-#
-# EasyBuild is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
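For reference, a minimal sketch (not part of any easyblock) of how the JuliaPackage sanity check above recognises an installed package in `Pkg.status()` output; the package name, version and sample output are made up:

```python
# Sketch only: mimic the regex lookup done on `Pkg.status()` output above.
import re

pkg_status_output = """Status `.../extensions/environments/default/Project.toml`
  [da04e1cc] MPI v0.19.2
  [4138dd39] JLD v0.12.3"""          # invented sample output

package_name, version = "MPI", "0.19.2"            # hypothetical values
pattern = r"%s\s+v%s" % (package_name, version)    # same pattern as in the easyblock;
                                                   # note '.' in the version acts as a regex wildcard
print(bool(re.search(pattern, pkg_status_output)))  # True -> the sanity check would pass
```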
-## -""" -@author: Pavel Grochal (INUITS) -@author: Kenneth Hoste (Ghent University) -@author: Alan O'Cais (Juelich Supercomputing Centre) -""" - -import os -import tempfile - -import easybuild.tools.environment as env -import easybuild.tools.toolchain as toolchain -from easybuild.framework.easyconfig import CUSTOM, MANDATORY -from easybuild.tools.build_log import EasyBuildError, print_warning, print_msg -from easybuild.tools.config import build_option -from easybuild.tools.modules import get_software_root, get_software_version -from easybuild.tools.run import run_cmd -from easybuild.tools.systemtools import get_shared_lib_ext - -from easybuild.easyblocks.generic.cmakemake import CMakeMake - -KOKKOS_CPU_ARCH_LIST = [ - 'AMDAVX', # AMD 64-bit x86 CPU (AVX 1) - 'ZEN', # AMD Zen class CPU (AVX 2) - 'ZEN2', # AMD Zen2 class CPU (AVX 2) - 'ARMV80', # ARMv8.0 Compatible CPU - 'ARMV81', # ARMv8.1 Compatible CPU - 'ARMV8_THUNDERX', # ARMv8 Cavium ThunderX CPU - 'ARMV8_THUNDERX2', # ARMv8 Cavium ThunderX2 CPU - 'WSM', # Intel Westmere CPU (SSE 4.2) - 'SNB', # Intel Sandy/Ivy Bridge CPU (AVX 1) - 'HSW', # Intel Haswell CPU (AVX 2) - 'BDW', # Intel Broadwell Xeon E-class CPU (AVX 2 + transactional mem) - 'SKX', # Intel Sky Lake Xeon E-class HPC CPU (AVX512 + transactional mem) - 'KNC', # Intel Knights Corner Xeon Phi - 'KNL', # Intel Knights Landing Xeon Phi - 'BGQ', # IBM Blue Gene/Q CPU - 'POWER7', # IBM POWER7 CPU - 'POWER8', # IBM POWER8 CPU - 'POWER9', # IBM POWER9 CPU - 'KEPLER30', # NVIDIA Kepler generation CC 3.0 GPU - 'KEPLER32', # NVIDIA Kepler generation CC 3.2 GPU - 'KEPLER35', # NVIDIA Kepler generation CC 3.5 GPU - 'KEPLER37', # NVIDIA Kepler generation CC 3.7 GPU - 'MAXWELL50', # NVIDIA Maxwell generation CC 5.0 GPU - 'MAXWELL52', # NVIDIA Maxwell generation CC 5.2 GPU - 'MAXWELL53', # NVIDIA Maxwell generation CC 5.3 GPU - 'PASCAL60', # NVIDIA Pascal generation CC 6.0 GPU - 'PASCAL61', # NVIDIA Pascal generation CC 6.1 GPU - 'VOLTA70', # NVIDIA Volta generation CC 7.0 GPU - 'VOLTA72', # NVIDIA Volta generation CC 7.2 GPU - 'TURING75', # NVIDIA Turing generation CC 7.5 GPU - 'AMPERE80', # NVIDIA Ampere generation CC 8.0 GPU - 'VEGA900', # AMD GPU MI25 GFX900 - 'VEGA906', # AMD GPU MI50/MI60 GFX906 - 'INTEL_GEN', #Intel GPUs Gen9+ -] - -KOKKOS_CPU_MAPPING = { - 'sandybridge': 'SNB', - 'ivybridge': 'SNB', - 'haswell': 'HSW', - 'broadwell': 'BDW', - 'skylake_avx512': 'SKX', - 'cascadelake': 'SKX', - 'knights-landing': 'KNL', - 'zen': 'ZEN', - 'zen2': 'ZEN2', # KOKKOS doesn't seem to distinguish between zen and zen2 (yet?) 
-} - - -KOKKOS_GPU_ARCH_TABLE = { - '3.0': 'KEPLER30', # NVIDIA Kepler generation CC 3.0 - '3.2': 'KEPLER32', # NVIDIA Kepler generation CC 3.2 - '3.5': 'KEPLER35', # NVIDIA Kepler generation CC 3.5 - '3.7': 'KEPLER37', # NVIDIA Kepler generation CC 3.7 - '5.0': 'MAXWELL50', # NVIDIA Maxwell generation CC 5.0 - '5.2': 'MAXWELL52', # NVIDIA Maxwell generation CC 5.2 - '5.3': 'MAXWELL53', # NVIDIA Maxwell generation CC 5.3 - '6.0': 'PASCAL60', # NVIDIA Pascal generation CC 6.0 - '6.1': 'PASCAL61', # NVIDIA Pascal generation CC 6.1 - '7.0': 'VOLTA70', # NVIDIA Volta generation CC 7.0 - '7.2': 'VOLTA72', # NVIDIA Volta generation CC 7.2 - '7.5': 'TURING75', # NVIDIA Turing generation CC 7.5 - '8.0': 'AMPERE80', # NVIDIA Ampere generation CC 8.0 -} - -PKG_PREFIX = 'PKG_' -PKG_USER_PREFIX = PKG_PREFIX + 'USER-' - - -class EB_LAMMPS(CMakeMake): - """ - Support for building and installing LAMMPS - """ - - def __init__(self, *args, **kwargs): - """LAMMPS easyblock constructor: determine whether we should build with CUDA support enabled.""" - super(EB_LAMMPS, self).__init__(*args, **kwargs) - - cuda_dep = 'cuda' in [dep['name'].lower() for dep in self.cfg.dependencies()] - cuda_toolchain = hasattr(self.toolchain, 'COMPILER_CUDA_FAMILY') - self.cuda = cuda_dep or cuda_toolchain - - @staticmethod - def extra_options(**kwargs): - """Custom easyconfig parameters for LAMMPS""" - extra_vars = CMakeMake.extra_options() - extra_vars.update({ - # see https://developer.nvidia.com/cuda-gpus - 'cuda_compute_capabilities': [[], "List of CUDA compute capabilities to build with", CUSTOM], - 'general_packages': [None, "List of general packages without '%s' prefix." % PKG_PREFIX, MANDATORY], - 'kokkos': [True, "Enable kokkos build.", CUSTOM], - 'kokkos_arch': [None, "Set kokkos processor arch manually, if auto-detection doesn't work.", CUSTOM], - 'user_packages': [None, "List user packages without '%s' prefix." % PKG_USER_PREFIX, MANDATORY], - }) - extra_vars['separate_build_dir'][0] = True - return extra_vars - - def prepare_step(self, *args, **kwargs): - """Custom prepare step for LAMMPS.""" - super(EB_LAMMPS, self).prepare_step(*args, **kwargs) - - # Unset LIBS when using both KOKKOS and CUDA - it will mix lib paths otherwise - if self.cfg['kokkos'] and self.cuda: - env.unset_env_vars(['LIBS']) - - def configure_step(self, **kwargs): - """Custom configuration procedure for LAMMPS.""" - - # list of CUDA compute capabilities to use can be specifed in two ways (where (2) overrules (1)): - # (1) in the easyconfig file, via the custom cuda_compute_capabilities; - # (2) in the EasyBuild configuration, via --cuda-compute-capabilities configuration option; - ec_cuda_cc = self.cfg['cuda_compute_capabilities'] - cfg_cuda_cc = build_option('cuda_compute_capabilities') - if cfg_cuda_cc and not isinstance(cfg_cuda_cc, list): - raise EasyBuildError("cuda_compute_capabilities in easyconfig should be provided as list of strings, " + - "(for example ['8.0', '7.5']). 
Got %s" % cfg_cuda_cc) - cuda_cc = check_cuda_compute_capabilities(cfg_cuda_cc, ec_cuda_cc, cuda=self.cuda) - - # cmake has its own folder - self.cfg['srcdir'] = os.path.join(self.start_dir, 'cmake') - - # Enable following packages, if not configured in easyconfig - default_options = ['BUILD_DOC', 'BUILD_EXE', 'BUILD_LIB', 'BUILD_TOOLS'] - for option in default_options: - if "-D%s=" % option not in self.cfg['configopts']: - self.cfg.update('configopts', '-D%s=on' % option) - - # enable building of shared libraries, if not specified already via configopts - if self.cfg['build_shared_libs'] is None and '-DBUILD_SHARED_LIBS=' not in self.cfg['configopts']: - self.cfg['build_shared_libs'] = True - - # Enable gzip, libpng and libjpeg-turbo support when its included as dependency - deps = [ - ('gzip', 'GZIP'), - ('libpng', 'PNG'), - ('libjpeg-turbo', 'JPEG'), - ] - for dep_name, with_name in deps: - with_opt = '-DWITH_%s=' % with_name - if with_opt not in self.cfg['configopts']: - if get_software_root(dep_name): - self.cfg.update('configopts', with_opt + 'yes') - else: - self.cfg.update('configopts', with_opt + 'no') - - # Disable auto-downloading/building Eigen dependency: - if '-DDOWNLOAD_EIGEN3=' not in self.cfg['configopts']: - self.cfg.update('configopts', '-DDOWNLOAD_EIGEN3=no') - - # Compiler complains about 'Eigen3_DIR' not being set, but actually it needs 'EIGEN3_INCLUDE_DIR'. - # see: https://github.com/lammps/lammps/issues/1110 - # Enable Eigen when its included as dependency dependency: - eigen_root = get_software_root('Eigen') - if eigen_root: - if '-DEIGEN3_INCLUDE_DIR=' not in self.cfg['configopts']: - self.cfg.update('configopts', '-DEIGEN3_INCLUDE_DIR=%s/include/Eigen' % get_software_root('Eigen')) - if '-DEigen3_DIR=' not in self.cfg['configopts']: - self.cfg.update('configopts', '-DEigen3_DIR=%s/share/eigen3/cmake/' % get_software_root('Eigen')) - - # LAMMPS Configuration Options - # https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - if self.cfg['general_packages']: - for package in self.cfg['general_packages']: - self.cfg.update('configopts', '-D%s%s=on' % (PKG_PREFIX, package)) - - if self.cfg['user_packages']: - for package in self.cfg['user_packages']: - self.cfg.update('configopts', '-D%s%s=on' % (PKG_USER_PREFIX, package)) - - # Optimization settings - pkg_opt = '-D%sOPT=' % PKG_PREFIX - if pkg_opt not in self.cfg['configopts']: - self.cfg.update('configopts', pkg_opt + 'on') - - # USER-INTEL enables optimizations on Intel processors. GCC has also partial support for some of them. 
- pkg_user_intel = '-D%sINTEL=' % PKG_USER_PREFIX - if pkg_user_intel not in self.cfg['configopts']: - if self.toolchain.comp_family() in [toolchain.GCC, toolchain.INTELCOMP]: - self.cfg.update('configopts', pkg_user_intel + 'on') - - # MPI/OpenMP - if self.toolchain.options.get('usempi', None): - self.cfg.update('configopts', '-DBUILD_MPI=yes') - if self.toolchain.options.get('openmp', None): - self.cfg.update('configopts', '-DBUILD_OMP=yes') - self.cfg.update('configopts', '-D%sOMP=on' % PKG_USER_PREFIX) - - # FFTW - if get_software_root("imkl") or get_software_root("FFTW"): - if '-DFFT=' not in self.cfg['configopts']: - if get_software_root("imkl"): - self.log.info("Using the MKL") - self.cfg.update('configopts', '-DFFT=MKL') - else: - self.log.info("Using FFTW") - self.cfg.update('configopts', '-DFFT=FFTW3') - if '-DFFT_PACK=' not in self.cfg['configopts']: - self.cfg.update('configopts', '-DFFT_PACK=array') - - # https://lammps.sandia.gov/doc/Build_extras.html - # KOKKOS - if self.cfg['kokkos']: - self.cfg.update('configopts', '-D%sKOKKOS=on' % PKG_PREFIX) - - processor_arch, gpu_arch = get_kokkos_arch(cuda_cc, self.cfg['kokkos_arch'], cuda=self.cuda) - - if self.toolchain.options.get('openmp', None): - self.cfg.update('configopts', '-DKokkos_ENABLE_OPENMP=yes') - self.cfg.update('configopts', '-DKokkos_ARCH_%s=yes' % processor_arch) - - # if KOKKOS and CUDA - if self.cuda: - nvcc_wrapper_path = os.path.join(self.start_dir, "lib", "kokkos", "bin", "nvcc_wrapper") - self.cfg.update('configopts', '-DKokkos_ENABLE_CUDA=yes') - self.cfg.update('configopts', '-DKokkos_ARCH_%s=yes' % gpu_arch) - self.cfg.update('configopts', '-DCMAKE_CXX_COMPILER="%s"' % nvcc_wrapper_path) - self.cfg.update('configopts', '-DCMAKE_CXX_FLAGS="-ccbin $CXX $CXXFLAGS"') - - # CUDA only - elif self.cuda: - self.cfg.update('configopts', '-D%sGPU=on' % PKG_PREFIX) - self.cfg.update('configopts', '-DGPU_API=cuda') - self.cfg.update('configopts', '-DGPU_ARCH=%s' % get_cuda_gpu_arch(cuda_cc)) - - # avoid that pip (ab)uses $HOME/.cache/pip - # cfr. 
https://pip.pypa.io/en/stable/reference/pip_install/#caching - env.setvar('XDG_CACHE_HOME', tempfile.gettempdir()) - self.log.info("Using %s as pip cache directory", os.environ['XDG_CACHE_HOME']) - - return super(EB_LAMMPS, self).configure_step() - - def sanity_check_step(self, *args, **kwargs): - """Run custom sanity checks for LAMMPS files, dirs and commands.""" - check_files = [ - 'atm', 'balance', 'colloid', 'crack', 'dipole', 'friction', - 'hugoniostat', 'indent', 'melt', 'message', 'min', 'msst', - 'nemd', 'obstacle', 'pour', 'voronoi', - ] - - custom_commands = [ - # LAMMPS test - you need to call specific test file on path - """python -c 'from lammps import lammps; l=lammps(); l.file("%s")'""" % - # The path is joined by "build_dir" (start_dir)/examples/filename/in.filename - os.path.join(self.start_dir, "examples", "%s" % check_file, "in.%s" % check_file) - # And this should be done for every file specified above - for check_file in check_files - ] - - # Execute sanity check commands within an initialized MPI in MPI enabled toolchains - if self.toolchain.options.get('usempi', None): - custom_commands = [self.toolchain.mpi_cmd_for(cmd, 1) for cmd in custom_commands] - - shlib_ext = get_shared_lib_ext() - custom_paths = { - 'files': [ - os.path.join('bin', 'lmp'), - os.path.join('include', 'lammps', 'library.h'), - os.path.join('lib64', 'liblammps.%s' % shlib_ext), - ], - 'dirs': [], - } - - python = get_software_version('Python') - if python: - pyshortver = '.'.join(get_software_version('Python').split('.')[:2]) - pythonpath = os.path.join('lib', 'python%s' % pyshortver, 'site-packages') - custom_paths['dirs'].append(pythonpath) - - return super(EB_LAMMPS, self).sanity_check_step(custom_commands=custom_commands, custom_paths=custom_paths) - - def make_module_extra(self): - """Add install path to PYTHONPATH""" - - txt = super(EB_LAMMPS, self).make_module_extra() - - python = get_software_version('Python') - if python: - pyshortver = '.'.join(get_software_version('Python').split('.')[:2]) - pythonpath = os.path.join('lib', 'python%s' % pyshortver, 'site-packages') - txt += self.module_generator.prepend_paths('PYTHONPATH', [pythonpath]) - - txt += self.module_generator.prepend_paths('PYTHONPATH', ["lib64"]) - txt += self.module_generator.prepend_paths('LD_LIBRARY_PATH', ["lib64"]) - - return txt - - -def get_cuda_gpu_arch(cuda_cc): - """Return CUDA gpu ARCH in LAMMPS required format. Example: 'sm_32' """ - # Get largest cuda supported - return 'sm_%s' % str(sorted(cuda_cc, reverse=True)[0]).replace(".", "") - - -def get_kokkos_arch(cuda_cc, kokkos_arch, cuda=None): - """ - Return KOKKOS ARCH in LAMMPS required format, which is either 'CPU_ARCH' or 'CPU_ARCH;GPU_ARCH'. - - see: https://lammps.sandia.gov/doc/Build_extras.html#kokkos - """ - if cuda is None or not isinstance(cuda, bool): - cuda = get_software_root('CUDA') - - processor_arch = None - - if kokkos_arch: - if kokkos_arch not in KOKKOS_CPU_ARCH_LIST: - warning_msg = "Specified CPU ARCH (%s) " % kokkos_arch - warning_msg += "was not found in listed options [%s]." % KOKKOS_CPU_ARCH_LIST - warning_msg += "Still might work though." - print_warning(warning_msg) - processor_arch = kokkos_arch - - else: - warning_msg = "kokkos_arch not set. Trying to auto-detect CPU arch." - print_warning(warning_msg) - - processor_arch = KOKKOS_CPU_MAPPING.get(get_cpu_arch()) - - if not processor_arch: - error_msg = "Couldn't determine CPU architecture, you need to set 'kokkos_arch' manually." 
- raise EasyBuildError(error_msg) - - print_msg("Determined cpu arch: %s" % processor_arch) - - gpu_arch = None - if cuda: - # CUDA below - for cc in sorted(cuda_cc, reverse=True): - gpu_arch = KOKKOS_GPU_ARCH_TABLE.get(str(cc)) - if gpu_arch: - break - else: - warning_msg = "(%s) GPU ARCH was not found in listed options." % cc - print_warning(warning_msg) - - if not gpu_arch: - error_msg = "Specified GPU ARCH (%s) " % cuda_cc - error_msg += "was not found in listed options [%s]." % KOKKOS_GPU_ARCH_TABLE - raise EasyBuildError(error_msg) - - return processor_arch, gpu_arch - - -def check_cuda_compute_capabilities(cfg_cuda_cc, ec_cuda_cc, cuda=None): - """ - Checks if cuda-compute-capabilities is set and prints warning if it gets declared on multiple places. - - :param cfg_cuda_cc: cuda-compute-capabilities from cli config - :param ec_cuda_cc: cuda-compute-capabilities from easyconfig - :param cuda: boolean to check if cuda should be enabled or not - :return: returns preferred cuda-compute-capabilities - """ - - if cuda is None or not isinstance(cuda, bool): - cuda = get_software_root('CUDA') - - cuda_cc = cfg_cuda_cc or ec_cuda_cc or [] - - if cuda: - if cfg_cuda_cc and ec_cuda_cc: - warning_msg = "cuda_compute_capabilities specified in easyconfig (%s)" % ec_cuda_cc - warning_msg += " are overruled by " - warning_msg += "--cuda-compute-capabilities configuration option (%s)" % cfg_cuda_cc - print_warning(warning_msg) - elif not cuda_cc: - error_msg = "No CUDA compute capabilities specified.\nTo build LAMMPS with Cuda you need to use" - error_msg += "the --cuda-compute-capabilities configuration option or the cuda_compute_capabilities " - error_msg += "easyconfig parameter to specify a list of CUDA compute capabilities to compile with." - raise EasyBuildError(error_msg) - - elif cuda_cc: - warning_msg = "Missing CUDA package (in dependencies), " - warning_msg += "but 'cuda_compute_capabilities' option was specified." - print_warning(warning_msg) - - return cuda_cc - - -def get_cpu_arch(): - """ - Checks for CPU architecture via archspec library. - https://github.com/archspec/archspec - Archspec should be bundled as build-dependency to determine CPU arch. - It can't be called directly in code because it gets available only after prepare_step. - - :return: returns detected cpu architecture - """ - out, ec = run_cmd("python -c 'from archspec.cpu import host; print(host())'", simple=False) - if ec: - raise EasyBuildError("Failed to determine CPU architecture: %s", out) - return out.strip() diff --git a/Custom_EasyBlocks/llvm.py b/Custom_EasyBlocks/llvm.py deleted file mode 100644 index 6d67e5f5c3464d44dc4bb00628bb2919a8f67c8e..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/llvm.py +++ /dev/null @@ -1,83 +0,0 @@ -## -# Copyright 2020 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. 
-# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for building and installing LLVM, implemented as an easyblock - -@author: Simon Branford (University of Birmingham) -""" -from easybuild.easyblocks.clang import CLANG_TARGETS, DEFAULT_TARGETS_MAP -from easybuild.easyblocks.generic.cmakemake import CMakeMake -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.modules import get_software_root -from easybuild.tools.systemtools import get_cpu_architecture - - -class EB_LLVM(CMakeMake): - """ - Support for building and installing LLVM - """ - - @staticmethod - def extra_options(): - extra_vars = CMakeMake.extra_options() - extra_vars.update({ - 'build_targets': [None, "Build targets for LLVM (host architecture if None). Possible values: " + - ', '.join(CLANG_TARGETS), CUSTOM], - 'enable_rtti': [True, "Enable RTTI", CUSTOM], - }) - return extra_vars - - def configure_step(self): - """ - Install extra tools in bin/; enable zlib if it is a dep; optionally enable rtti; and set the build target - """ - self.cfg.update('configopts', '-DLLVM_INSTALL_UTILS=ON') - - if get_software_root('zlib'): - self.cfg.update('configopts', '-DLLVM_ENABLE_ZLIB=ON') - - if self.cfg["enable_rtti"]: - self.cfg.update('configopts', '-DLLVM_ENABLE_RTTI=ON') - - build_targets = self.cfg['build_targets'] - if build_targets is None: - arch = get_cpu_architecture() - default_targets = DEFAULT_TARGETS_MAP.get(arch, None) - if default_targets: - self.cfg['build_targets'] = build_targets = default_targets - self.log.debug("Using %s as default build targets for CPU architecture %s.", default_targets, arch) - else: - raise EasyBuildError("No default build targets defined for CPU architecture %s.", arch) - - unknown_targets = [target for target in build_targets if target not in CLANG_TARGETS] - - if unknown_targets: - raise EasyBuildError("Some of the chosen build targets (%s) are not in %s.", - ', '.join(unknown_targets), ', '.join(CLANG_TARGETS)) - - self.cfg.update('configopts', '-DLLVM_TARGETS_TO_BUILD="%s"' % ';'.join(build_targets)) - - super(EB_LLVM, self).configure_step() diff --git a/Custom_EasyBlocks/namd.py b/Custom_EasyBlocks/namd.py deleted file mode 100644 index 2b760beec87afb83bfe7f38fba343ab124e99b82..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/namd.py +++ /dev/null @@ -1,228 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2013-2019 CaSToRC, The Cyprus Institute -# Authors:: George Tsouloupas <g.tsouloupas@cyi.ac.cy> -# License:: MIT/GPL -# $Id$ -# -## -""" -Easybuild support for building NAMD, implemented as an easyblock - -@author: George Tsouloupas (Cyprus Institute) -@author: Kenneth Hoste (Ghent University) -""" -import glob -import os -import re -import shutil -from distutils.version import LooseVersion - -import easybuild.tools.toolchain as toolchain -from easybuild.easyblocks.generic.makecp import MakeCp -from easybuild.framework.easyconfig import CUSTOM, MANDATORY -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.config import build_option -from 
easybuild.tools.filetools import apply_regex_substitutions, change_dir, extract_file -from easybuild.tools.modules import get_software_root, get_software_version -from easybuild.tools.run import run_cmd -from easybuild.tools.systemtools import POWER, X86_64, get_cpu_architecture - - -class EB_NAMD(MakeCp): - """ - Support for building NAMD - """ - @staticmethod - def extra_options(): - """Define extra NAMD-specific easyconfig parameters.""" - extra = MakeCp.extra_options() - # files_to_copy is not mandatory here - extra['files_to_copy'][2] = CUSTOM - extra.update({ - # see http://charm.cs.illinois.edu/manuals/html/charm++/A.html - 'charm_arch': [None, "Charm++ target architecture", MANDATORY], - 'charm_extra_cxxflags': ['', "Extra C++ compiler options to use for building Charm++", CUSTOM], - 'charm_opts': ['--with-production', "Charm++ build options", CUSTOM], - 'namd_basearch': [None, "NAMD base target architecture (compiler family is appended)", CUSTOM], - 'namd_cfg_opts': ['', "NAMD configure options", CUSTOM], - 'runtest': [True, "Run NAMD test case after building", CUSTOM], - }) - - return extra - - def __init__(self, *args, **kwargs): - """Custom easyblock constructor for NAMD, initialize class variables.""" - super(EB_NAMD, self).__init__(*args, **kwargs) - self.namd_arch = None - - def prepare_step(self, *args, **kwargs): - """Prepare build environment.""" - super(EB_NAMD, self).prepare_step(*args, **kwargs) - - if self.cfg['namd_basearch'] is None: - - self.log.info("namd_basearch not specified, so determining it based a CPU arch...") - - arch = get_cpu_architecture() - if arch == X86_64: - basearch = 'Linux-x86_64' - elif arch == POWER: - basearch = 'Linux-POWER' - - self.cfg['namd_basearch'] = basearch - self.log.info("Derived value for 'namd_basearch': %s", self.cfg['namd_basearch']) - - def extract_step(self): - """Custom extract step for NAMD, we need to extract charm++ so we can patch it.""" - super(EB_NAMD, self).extract_step() - - change_dir(self.src[0]['finalpath']) - self.charm_tarballs = glob.glob('charm-*.tar') - if len(self.charm_tarballs) != 1: - raise EasyBuildError("Expected to find exactly one tarball for Charm++, found: %s", self.charm_tarballs) - - srcdir = extract_file(self.charm_tarballs[0], os.getcwd(), change_into_dir=False) - change_dir(srcdir) - - def configure_step(self): - """Custom configure step for NAMD, we build charm++ first (if required).""" - - # complete Charm ++ and NAMD architecture string with compiler family - comp_fam = self.toolchain.comp_family() - if self.toolchain.options.get('usempi', False): - charm_arch_comp = 'mpicxx' - else: - charm_arch_comps = { - toolchain.GCC: 'gcc', - toolchain.INTELCOMP: 'icc', - } - charm_arch_comp = charm_arch_comps.get(comp_fam, None) - namd_comps = { - toolchain.GCC: 'g++', - toolchain.INTELCOMP: 'icc', - } - namd_comp = namd_comps.get(comp_fam, None) - if charm_arch_comp is None or namd_comp is None: - raise EasyBuildError("Unknown compiler family, can't complete Charm++/NAMD target architecture.") - - # NOTE: important to add smp BEFORE the compiler - # charm arch style is: mpi-linux-x86_64-smp-mpicxx - # otherwise the setting of name_charm_arch below will get things - # in the wrong order - if self.toolchain.options.get('openmp', False): - self.cfg.update('charm_arch', 'smp') - self.cfg.update('charm_arch', charm_arch_comp) - self.log.info("Updated 'charm_arch': %s", self.cfg['charm_arch']) - - self.namd_arch = '%s-%s' % (self.cfg['namd_basearch'], namd_comp) - self.log.info("Completed NAMD target 
architecture: %s", self.namd_arch) - - cmd = "./build charm++ %(arch)s %(opts)s --with-numa -j%(parallel)s '%(cxxflags)s'" % { - 'arch': self.cfg['charm_arch'], - 'cxxflags': os.environ['CXXFLAGS'] + ' -DMPICH_IGNORE_CXX_SEEK ' + self.cfg['charm_extra_cxxflags'], - 'opts': self.cfg['charm_opts'], - 'parallel': self.cfg['parallel'], - } - charm_subdir = '.'.join(os.path.basename(self.charm_tarballs[0]).split('.')[:-1]) - self.log.debug("Building Charm++ using cmd '%s' in '%s'" % (cmd, charm_subdir)) - run_cmd(cmd, path=charm_subdir) - - # compiler (options) - self.cfg.update('namd_cfg_opts', '--cc "%s" --cc-opts "%s"' % (os.environ['CC'], os.environ['CFLAGS'])) - cxxflags = os.environ['CXXFLAGS'] - if LooseVersion(self.version) >= LooseVersion('2.12'): - cxxflags += ' --std=c++11' - self.cfg.update('namd_cfg_opts', '--cxx "%s" --cxx-opts "%s"' % (os.environ['CXX'], cxxflags)) - - # NAMD dependencies: CUDA, TCL, FFTW - # Cuda and MPI don't mix on the latest NAMD - #cuda = get_software_root('CUDA') - #if cuda: - # self.cfg.update('namd_cfg_opts', "--with-cuda --cuda-prefix %s" % cuda) - - tcl = get_software_root('Tcl') - if tcl: - self.cfg.update('namd_cfg_opts', '--with-tcl --tcl-prefix %s' % tcl) - tclversion = '.'.join(get_software_version('Tcl').split('.')[0:2]) - tclv_subs = [(r'-ltcl[\d.]*\s', '-ltcl%s ' % tclversion)] - - apply_regex_substitutions(os.path.join('arch', '%s.tcl' % self.cfg['namd_basearch']), tclv_subs) - - fftw = get_software_root('FFTW') - if fftw: - if LooseVersion(get_software_version('FFTW')) >= LooseVersion('3.0'): - if LooseVersion(self.version) >= LooseVersion('2.9'): - self.cfg.update('namd_cfg_opts', "--with-fftw3") - else: - raise EasyBuildError("Using FFTW v3.x only supported in NAMD v2.9 and up.") - else: - self.cfg.update('namd_cfg_opts', "--with-fftw") - self.cfg.update('namd_cfg_opts', "--fftw-prefix %s" % fftw) - - namd_charm_arch = "--charm-arch %s" % '-'.join(self.cfg['charm_arch'].strip().split()) - cmd = "./config %s %s %s " % (self.namd_arch, namd_charm_arch, self.cfg["namd_cfg_opts"]) - run_cmd(cmd) - - def build_step(self): - """Build NAMD for configured architecture""" - super(EB_NAMD, self).build_step(path=self.namd_arch) - - def test_step(self): - """Run NAMD test case.""" - if self.cfg['runtest']: - - if not build_option('mpi_tests'): - self.log.info("Skipping testing of NAMD since MPI testing is disabled") - return - - namdcmd = os.path.join(self.cfg['start_dir'], self.namd_arch, 'namd%s' % self.version.split('.')[0]) - if self.cfg['charm_arch'].startswith('mpi'): - namdcmd = self.toolchain.mpi_cmd_for(namdcmd, 2) - ppn = '' - if self.toolchain.options.get('openmp', False): - ppn = '+ppn 2' - cmd = "%(namd)s %(ppn)s %(testdir)s" % { - 'namd': namdcmd, - 'ppn': ppn, - 'testdir': os.path.join(self.cfg['start_dir'], self.namd_arch, 'src', 'alanin'), - } - out, ec = run_cmd(cmd, simple=False) - if ec == 0: - test_ok_regex = re.compile(r"(^Program finished.$|End of program\s*$)", re.M) - if test_ok_regex.search(out): - self.log.debug("Test '%s' ran fine." 
% cmd) - else: - raise EasyBuildError("Test '%s' failed ('%s' not found), output: %s", - cmd, test_ok_regex.pattern, out) - else: - self.log.debug("Skipping running NAMD test case after building") - - def install_step(self): - """Install by copying the correct directory to the install dir""" - srcdir = os.path.join(self.cfg['start_dir'], self.namd_arch) - try: - # copy all files, except for .rootdir (required to avoid cyclic copying) - for item in [x for x in os.listdir(srcdir) if x not in ['.rootdir']]: - fullsrc = os.path.join(srcdir, item) - if os.path.isdir(fullsrc): - shutil.copytree(fullsrc, os.path.join(self.installdir, item), symlinks=False) - elif os.path.isfile(fullsrc): - shutil.copy2(fullsrc, self.installdir) - except OSError as err: - raise EasyBuildError("Failed to copy NAMD build from %s to install directory: %s", srcdir, err) - - def make_module_extra(self): - """Add the install directory to PATH""" - txt = super(EB_NAMD, self).make_module_extra() - txt += self.module_generator.prepend_paths("PATH", ['']) - return txt - - def sanity_check_step(self): - """Custom sanity check for NAMD.""" - custom_paths = { - 'files': ['charmrun', 'flipbinpdb', 'flipdcd', 'namd%s' % self.version.split('.')[0], 'psfgen'], - 'dirs': ['inc'], - } - super(EB_NAMD, self).sanity_check_step(custom_paths=custom_paths) diff --git a/Custom_EasyBlocks/nvhpc.py b/Custom_EasyBlocks/nvhpc.py deleted file mode 100644 index 9df2f6f6b975087b5438eb1bc18ff125827c6e16..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/nvhpc.py +++ /dev/null @@ -1,304 +0,0 @@ -## -# Copyright 2015-2019 Bart Oldeman -# Copyright 2016-2021 Forschungszentrum Juelich -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. 
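As an aside, a minimal sketch of the output check used by the NAMD test step above; the sample output is invented, not real NAMD output:

```python
# Sketch only: the regex the NAMD easyblock uses to decide that the 'alanin'
# test case ran to completion.
import re

test_ok_regex = re.compile(r"(^Program finished.$|End of program\s*$)", re.M)

sample_output = "Info: SIMULATION PARAMETERS ...\nEnd of program\n"  # made-up output
print(bool(test_ok_regex.search(sample_output)))   # True -> the test is considered passed
```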
-## -""" -EasyBuild support for installing NVIDIA HPC SDK compilers, based on the easyblock for PGI compilers - -@author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada) -@author: Damian Alvarez (Forschungszentrum Juelich) -@author: Andreas Herten (Forschungszentrum Juelich) -""" -import os -import fileinput -import re -import stat -import sys -import platform - -from distutils.version import LooseVersion -from easybuild.easyblocks.generic.packedbinary import PackedBinary -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.filetools import adjust_permissions, write_file -from easybuild.tools.run import run_cmd -from easybuild.tools.modules import get_software_root, get_software_version -from easybuild.tools.config import build_option -from easybuild.tools.build_log import EasyBuildError, print_warning - - -# contents for siterc file to make PGI/NVHPC pick up $LIBRARY_PATH -# cfr. https://www.pgroup.com/support/link.htm#lib_path_ldflags -SITERC_LIBRARY_PATH = """ -# get the value of the environment variable LIBRARY_PATH -variable LIBRARY_PATH is environment(LIBRARY_PATH); - -# split this value at colons, separate by -L, prepend 1st one by -L -variable library_path is -default($if($LIBRARY_PATH,-L$replace($LIBRARY_PATH,":", -L))); - -# add the -L arguments to the link line -append LDLIBARGS=$library_path; - -# also include the location where libm & co live on Debian-based systems -# cfr. https://github.com/easybuilders/easybuild-easyblocks/pull/919 -append LDLIBARGS=-L/usr/lib/x86_64-linux-gnu; -""" - - -class EB_NVHPC(PackedBinary): - """ - Support for installing the NVIDIA HPC SDK (NVHPC) compilers - """ - - @staticmethod - def extra_options(): - extra_vars = { - 'default_cuda_version': [None, "CUDA Version to be used as default (10.2 or 11.0 or ...)", CUSTOM], - 'module_add_cuda': [False, "Add NVHPC's CUDA to module", CUSTOM], - 'module_add_math_libs': [False, "Add NVHPC's math libraries to module", CUSTOM], - 'module_add_nccl': [False, "Add NVHPC's NCCL library to module", CUSTOM], - 'module_add_nvshmem': [False, "Add NVHPC's NVSHMEM library to module", CUSTOM], - 'module_add_profilers': [False, "Add NVHPC's NVIDIA Profilers to module", CUSTOM], - 'module_byo_compilers': [False, "BYO Compilers: Remove compilers from module", CUSTOM], - 'module_nvhpc_own_mpi': [False, "Add NVHPC's packaged OpenMPI to module", CUSTOM] - } - return PackedBinary.extra_options(extra_vars) - - def __init__(self, *args, **kwargs): - """Easyblock constructor, define custom class variables specific to NVHPC.""" - super(EB_NVHPC, self).__init__(*args, **kwargs) - - # Ideally we should be using something like `easybuild.tools.systemtools.get_cpu_architecture` here, however, - # on `ppc64le` systems this function returns `POWER` instead of `ppc64le`. 
Since this path needs to reflect - # `arch` (https://easybuild.readthedocs.io/en/latest/version-specific/easyconfig_templates.html) the same - # procedure from `templates.py` was reused here: - architecture = 'Linux_%s' % platform.uname()[4] - self.nvhpc_install_subdir = os.path.join(architecture, self.version) - - def install_step(self): - """Install by running install command.""" - - # EULA for NVHPC must be accepted via --accept-eula-for EasyBuild configuration option, - # or via 'accept_eula = True' in easyconfig file - self.check_accepted_eula(more_info='https://docs.nvidia.com/hpc-sdk/eula/index.html') - - default_cuda_version = self.cfg['default_cuda_version'] - if default_cuda_version is None: - module_cuda_version_full = get_software_version('CUDA') - if module_cuda_version_full is not None: - default_cuda_version = '.'.join(module_cuda_version_full.split('.')[:2]) - else: - error_msg = "A default CUDA version is needed for installation of NVHPC. " - error_msg += "It can not be determined automatically and needs to be added manually. " - error_msg += "You can edit the easyconfig file, " - error_msg += "or use 'eb --try-amend=default_cuda_version=<version>'." - raise EasyBuildError(error_msg) - - # Parse default_compute_capability from different sources (CLI has priority) - ec_default_compute_capability = self.cfg['cuda_compute_capabilities'] - cfg_default_compute_capability = build_option('cuda_compute_capabilities') - if cfg_default_compute_capability is not None: - default_compute_capability = cfg_default_compute_capability - elif ec_default_compute_capability and ec_default_compute_capability is not None: - default_compute_capability = ec_default_compute_capability - else: - error_msg = "A default Compute Capability is needed for installation of NVHPC." - error_msg += "Please provide it either in the easyconfig file like 'cuda_compute_capabilities=\"7.0\"'," - error_msg += "or use 'eb --cuda-compute-capabilities=7.0' from the command line." - raise EasyBuildError(error_msg) - - # Extract first element of default_compute_capability list, if it is a list - if isinstance(default_compute_capability, list): - _before_default_compute_capability = default_compute_capability - default_compute_capability = _before_default_compute_capability[0] - warning_msg = "Replaced list of compute capabilities {} ".format(_before_default_compute_capability) - warning_msg += "with first element of list {}".format(default_compute_capability) - print_warning(warning_msg) - - # Remove dot-divider for CC; error out if it is not a string - if isinstance(default_compute_capability, str): - default_compute_capability = default_compute_capability.replace('.', '') - else: - raise EasyBuildError("Unexpected non-string value encountered for compute capability: %s", - default_compute_capability) - - nvhpc_env_vars = { - 'NVHPC_INSTALL_DIR': self.installdir, - 'NVHPC_SILENT': 'true', - 'NVHPC_DEFAULT_CUDA': str(default_cuda_version), # 10.2, 11.0 - 'NVHPC_STDPAR_CUDACC': str(default_compute_capability), # 70, 80; single value, no list! 
- } - cmd = "%s ./install" % ' '.join(['%s=%s' % x for x in sorted(nvhpc_env_vars.items())]) - run_cmd(cmd, log_all=True, simple=True) - - # make sure localrc uses GCC in PATH, not always the system GCC, and does not use a system g77 but gfortran - install_abs_subdir = os.path.join(self.installdir, self.nvhpc_install_subdir) - compilers_subdir = os.path.join(install_abs_subdir, "compilers") - makelocalrc_filename = os.path.join(compilers_subdir, "bin", "makelocalrc") - for line in fileinput.input(makelocalrc_filename, inplace='1', backup='.orig'): - line = re.sub(r"^PATH=/", r"#PATH=/", line) - sys.stdout.write(line) - - cmd = "%s -x %s -g77 /" % (makelocalrc_filename, compilers_subdir) - run_cmd(cmd, log_all=True, simple=True) - - # If an OS libnuma is NOT found, makelocalrc creates symbolic links to libpgnuma.so - # If we use the EB libnuma, delete those symbolic links to ensure they are not used - if get_software_root("numactl"): - for filename in ["libnuma.so", "libnuma.so.1"]: - path = os.path.join(compilers_subdir, "lib", filename) - if os.path.islink(path): - os.remove(path) - - if LooseVersion(self.version) < LooseVersion('21.3'): - # install (or update) siterc file to make NVHPC consider $LIBRARY_PATH - siterc_path = os.path.join(compilers_subdir, 'bin', 'siterc') - write_file(siterc_path, SITERC_LIBRARY_PATH, append=True) - self.log.info("Appended instructions to pick up $LIBRARY_PATH to siterc file at %s: %s", - siterc_path, SITERC_LIBRARY_PATH) - - # The cuda nvvp tar file has broken permissions - adjust_permissions(self.installdir, stat.S_IWUSR, add=True, onlydirs=True) - - def sanity_check_step(self): - """Custom sanity check for NVHPC""" - prefix = self.nvhpc_install_subdir - compiler_names = ['nvc', 'nvc++', 'nvfortran'] - - if LooseVersion(self.version) < LooseVersion('21.3'): - files = [os.path.join(prefix, 'compilers', 'bin', x) for x in compiler_names + ['siterc']] - else: - files = [os.path.join(prefix, 'compilers', 'bin', x) for x in compiler_names] - - custom_paths = { - 'files': files, - 'dirs': [os.path.join(prefix, 'compilers', 'bin'), os.path.join(prefix, 'compilers', 'lib'), - os.path.join(prefix, 'compilers', 'include'), os.path.join(prefix, 'compilers', 'man')] - } - custom_commands = ["%s -v" % compiler for compiler in compiler_names] - super(EB_NVHPC, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands) - - def _nvhpc_extended_components(self, dirs, basepath, env_vars_dirs): - """ - Extends `dirs` dict of key:environment_variables, value:list_of_directories with additional vars and dirs. - The dictionary key for a new env var will be created if it doesn't exist. - Also, the relative path specified in the `env_vars_dirs` dict is absolutized with the `basepath` prefix. - """ - for env_var, folders in sorted(env_vars_dirs.items()): - if env_var not in dirs: - dirs[env_var] = [] - if not isinstance(folders, list): - folders = [folders] - for folder in folders: - dirs[env_var].append(os.path.join(basepath, folder)) - - def make_module_req_guess(self): - """Prefix subdirectories in NVHPC install dir considered for environment variables defined in module file.""" - dirs = super(EB_NVHPC, self).make_module_req_guess() - for key in dirs: - dirs[key] = [os.path.join(self.nvhpc_install_subdir, 'compilers', d) for d in dirs[key]] - - # $CPATH should not be defined in module for NVHPC, it causes problems - # cfr. 
https://github.com/easybuilders/easybuild-easyblocks/issues/830 - if 'CPATH' in dirs: - self.log.info("Removing $CPATH entry: %s", dirs['CPATH']) - del dirs['CPATH'] - - # EasyBlock option parsing follows: - # BYO Compilers: - # Use NVHPC's libraries and tools with other, external compilers - if self.cfg['module_byo_compilers']: - if 'PATH' in dirs: - del dirs["PATH"] - # Own MPI: - # NVHPC is shipped with a compiled OpenMPI installation - # Enable it by setting according environment variables - if self.cfg['module_nvhpc_own_mpi']: - self.nvhpc_mpi_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "mpi") - env_vars_dirs = { - 'PATH': 'bin', - 'CPATH': 'include', - 'LD_LIBRARY_PATH': 'lib' - } - self._nvhpc_extended_components(dirs, self.nvhpc_mpi_basedir, env_vars_dirs) - # Math Libraries: - # NVHPC is shipped with math libraries (in a dedicated folder) - # Enable them by setting according environment variables - if self.cfg['module_add_math_libs']: - self.nvhpc_math_basedir = os.path.join(self.nvhpc_install_subdir, "math_libs") - env_vars_dirs = { - 'CPATH': 'include', - 'LD_LIBRARY_PATH': 'lib64' - } - self._nvhpc_extended_components(dirs, self.nvhpc_math_basedir, env_vars_dirs) - # GPU Profilers: - # NVHPC is shipped with NVIDIA's GPU profilers (Nsight Compute/Nsight Systems) - # Enable them by setting the according environment variables - if self.cfg['module_add_profilers']: - self.nvhpc_profilers_basedir = os.path.join(self.nvhpc_install_subdir, "profilers") - env_vars_dirs = { - 'PATH': ['Nsight_Compute', 'Nsight_Systems/bin'] - } - self._nvhpc_extended_components(dirs, self.nvhpc_profilers_basedir, env_vars_dirs) - # NCCL: - # NVHPC is shipped with NCCL - # Enable it by setting the according environment variables - if self.cfg['module_add_nccl']: - self.nvhpc_nccl_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nccl") - env_vars_dirs = { - 'CPATH': 'include', - 'LD_LIBRARY_PATH': 'lib' - } - self._nvhpc_extended_components(dirs, self.nvhpc_nccl_basedir, env_vars_dirs) - # NVSHMEM: - # NVHPC is shipped with NVSHMEM - # Enable it by setting the according environment variables - if self.cfg['module_add_nvshmem']: - self.nvhpc_nvshmem_basedir = os.path.join(self.nvhpc_install_subdir, "comm_libs", "nvshmem") - env_vars_dirs = { - 'CPATH': 'include', - 'LD_LIBRARY_PATH': 'lib' - } - self._nvhpc_extended_components(dirs, self.nvhpc_nvshmem_basedir, env_vars_dirs) - # CUDA: - # NVHPC is shipped with CUDA (possibly multiple versions) - # Rather use this CUDA than an external CUDA (via $CUDA_HOME) by setting according environment variables - if self.cfg['module_add_cuda']: - self.nvhpc_cuda_basedir = os.path.join(self.nvhpc_install_subdir, "cuda") - env_vars_dirs = { - 'PATH': 'bin', - 'LD_LIBRARY_PATH': 'lib64', - 'CPATH': 'include' - } - self._nvhpc_extended_components(dirs, self.nvhpc_cuda_basedir, env_vars_dirs) - return dirs - - def make_module_extra(self): - """Add environment variable for NVHPC location""" - txt = super(EB_NVHPC, self).make_module_extra() - txt += self.module_generator.set_environment('NVHPC', self.installdir) - return txt diff --git a/Custom_EasyBlocks/score_p.py b/Custom_EasyBlocks/score_p.py deleted file mode 100644 index 46599a5bfe3c6a3a2de785021a2cd2e002e57a51..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/score_p.py +++ /dev/null @@ -1,140 +0,0 @@ -## -# Copyright 2013-2020 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# 
with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for software using the Score-P configuration style (e.g., Cube, OTF2, Scalasca, and Score-P), -implemented as an easyblock. - -@author: Kenneth Hoste (Ghent University) -@author: Bernd Mohr (Juelich Supercomputing Centre) -@author: Markus Geimer (Juelich Supercomputing Centre) -@author: Alexander Grund (TU Dresden) -""" -import easybuild.tools.toolchain as toolchain -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.modules import get_software_root, get_software_libdir - - -class EB_Score_minus_P(ConfigureMake): - """ - Support for building and installing software using the Score-P configuration style (e.g., Cube, OTF2, Scalasca, - and Score-P). - """ - - def configure_step(self, *args, **kwargs): - """Configure the build, set configure options for compiler, MPI and dependencies.""" - # On non-cross-compile platforms, specify compiler and MPI suite explicitly. This is much quicker and safer - # than autodetection. In Score-P build-system terms, the following platforms are considered cross-compile - # architectures: - # - # - Cray XT/XE/XK/XC series - # - Fujitsu FX10, FX100 & K computer - # - IBM Blue Gene series - # - # Of those, only Cray is supported right now. - tc_fam = self.toolchain.toolchain_family() - if tc_fam != toolchain.CRAYPE: - # --with-nocross-compiler-suite=(gcc|ibm|intel|pgi|studio) - comp_opts = { - # assume that system toolchain uses a system-provided GCC - toolchain.SYSTEM: 'gcc', - toolchain.GCC: 'gcc', - toolchain.IBMCOMP: 'ibm', - toolchain.INTELCOMP: 'intel', - toolchain.PGI: 'pgi', - toolchain.NVHPC: 'pgi', - } - comp_fam = self.toolchain.comp_family() - if comp_fam in comp_opts: - self.cfg.update('configopts', "--with-nocross-compiler-suite=%s" % comp_opts[comp_fam]) - else: - raise EasyBuildError("Compiler family %s not supported yet (only: %s)", - comp_fam, ', '.join(comp_opts.keys())) - - # --with-mpi=(bullxmpi|hp|ibmpoe|intel|intel2|intelpoe|lam|mpibull2|mpich|mpich2|mpich3|openmpi| - # platform|scali|sgimpt|sun) - # - # Notes: - # - intel: Intel MPI v1.x (ancient & unsupported) - # - intel2: Intel MPI v2.x and higher - # - intelpoe: IBM POE MPI for Intel platforms - # - mpich: MPICH v1.x (ancient & unsupported) - # - mpich2: MPICH2 v1.x - # - mpich3: MPICH v3.x & MVAPICH2 - # This setting actually only affects options passed to the MPI (Fortran) compiler wrappers. 
- # And since MPICH v3.x-compatible options were already supported in MVAPICH2 v1.7, it is - # safe to use 'mpich3' for all supported versions although MVAPICH2 is based on MPICH v3.x - # only since v1.9b. - # - # With minimal toolchains, packages using this easyblock may be built with a non-MPI toolchain (e.g., OTF2). - # In this case, skip passing the '--with-mpi' option. - mpi_opts = { - toolchain.INTELMPI: 'intel2', - toolchain.OPENMPI: 'openmpi', - toolchain.MPICH: 'mpich3', # In EB terms, MPICH means MPICH 3.x - toolchain.MPICH2: 'mpich2', - toolchain.MVAPICH2: 'mpich3', - } - mpi_fam = self.toolchain.mpi_family() - if mpi_fam is not None: - if mpi_fam in mpi_opts: - self.cfg.update('configopts', "--with-mpi=%s" % mpi_opts[mpi_fam]) - else: - raise EasyBuildError("MPI family %s not supported yet (only: %s)", - mpi_fam, ', '.join(mpi_opts.keys())) - - # Auto-detection for dependencies mostly works fine, but hard specify paths anyway to have full control - # - # Notes: - # - binutils: Pass include/lib directories separately, as different directory layouts may break Score-P's - # configure, see https://github.com/geimer/easybuild-easyblocks/pull/4#issuecomment-219284755 - deps = { - 'binutils': ['--with-libbfd-include=%s/include', - '--with-libbfd-lib=%%s/%s' % get_software_libdir('binutils', fs=['libbfd.a'])], - 'libunwind': ['--with-libunwind=%s'], - # Older versions use Cube - 'Cube': ['--with-cube=%s/bin'], - # Recent versions of Cube are split into CubeLib and CubeW(riter) - 'CubeLib': ['--with-cubelib=%s/bin'], - 'CubeWriter': ['--with-cubew=%s/bin'], - 'CUDA': ['--enable-cuda', '--with-libcudart=%s'], - 'OTF2': ['--with-otf2=%s/bin'], - 'OPARI2': ['--with-opari2=%s/bin'], - 'PAPI': ['--with-papi-header=%s/include', '--with-papi-lib=%%s/%s' % get_software_libdir('PAPI')], - 'PDT': ['--with-pdt=%s/bin'], - 'Qt': ['--with-qt=%s'], - 'SIONlib': ['--with-sionlib=%s/bin'], - } - for (dep_name, dep_opts) in deps.items(): - dep_root = get_software_root(dep_name) - if dep_root: - for dep_opt in dep_opts: - try: - dep_opt = dep_opt % dep_root - except TypeError: - pass # Ignore subtitution error when there is nothing to substitute - self.cfg.update('configopts', dep_opt) - - super(EB_Score_minus_P, self).configure_step(*args, **kwargs) diff --git a/Custom_EasyBlocks/siesta.py b/Custom_EasyBlocks/siesta.py deleted file mode 100644 index 648096f4d06a523fc95d2b7c351105e22c19647d..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/siesta.py +++ /dev/null @@ -1,386 +0,0 @@ -## -# Copyright 2009-2018 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. 
If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for building and installing Siesta, implemented as an easyblock - -@author: Miguel Dias Costa (National University of Singapore) -@author: Ake Sandgren (Umea University) -""" -import os -import stat - -import easybuild.tools.toolchain as toolchain -from distutils.version import LooseVersion -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import adjust_permissions, apply_regex_substitutions, change_dir, copy_dir, copy_file, mkdir -from easybuild.tools.modules import get_software_root -from easybuild.tools.run import run_cmd - - -class EB_Siesta(ConfigureMake): - """ - Support for building/installing Siesta. - - avoid parallel build for older versions - """ - - @staticmethod - def extra_options(extra_vars=None): - """Define extra options for Siesta""" - extra = { - 'with_transiesta': [True, "Build transiesta", CUSTOM], - 'with_utils': [True, "Build all utils", CUSTOM], - } - return ConfigureMake.extra_options(extra_vars=extra) - - def configure_step(self): - """ - Custom configure and build procedure for Siesta. - - There are two main builds to do, siesta and transiesta - - In addition there are multiple support tools to build - """ - - start_dir = self.cfg['start_dir'] - obj_dir = os.path.join(start_dir, 'Obj') - arch_make = os.path.join(obj_dir, 'arch.make') - bindir = os.path.join(start_dir, 'bin') - - loose_ver = LooseVersion(self.version) - - par = '' - if loose_ver >= LooseVersion('4.1'): - par = '-j %s' % self.cfg['parallel'] - - # enable OpenMP support if desired - env_var_suff = '' - if self.toolchain.options.get('openmp', None): - env_var_suff = '_MT' - - scalapack = os.environ['LIBSCALAPACK' + env_var_suff] - blacs = os.environ['LIBSCALAPACK' + env_var_suff] - lapack = os.environ['LIBLAPACK' + env_var_suff] - blas = os.environ['LIBBLAS' + env_var_suff] - if get_software_root('imkl') or get_software_root('FFTW'): - fftw = os.environ['LIBFFT' + env_var_suff] - else: - fftw = None - - regex_newlines = [] - regex_subs = [ - ('dc_lapack.a', ''), - (r'^NETCDF_INTERFACE\s*=.*$', ''), - ('libsiestaBLAS.a', ''), - ('libsiestaLAPACK.a', ''), - # Needed here to allow 4.1-b1 to be built with openmp - (r"^(LDFLAGS\s*=).*$", r"\1 %s %s" % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])), - ] - - netcdff_loc = get_software_root('netCDF-Fortran') - if netcdff_loc: - # Needed for gfortran at least - regex_newlines.append((r"^(ARFLAGS_EXTRA\s*=.*)$", r"\1\nNETCDF_INCFLAGS = -I%s/include" % netcdff_loc)) - - if fftw: - fft_inc, fft_lib = os.environ['FFT_INC_DIR'], os.environ['FFT_LIB_DIR'] - fppflags = r"\1\nFFTW_INCFLAGS = -I%s\nFFTW_LIBS = -L%s %s" % (fft_inc, fft_lib, fftw) - regex_newlines.append((r'(FPPFLAGS\s*=.*)$', fppflags)) - - # Make a temp installdir during the build of the various parts - mkdir(bindir) - - # change to actual build dir - change_dir(obj_dir) - - # Populate start_dir with makefiles - run_cmd(os.path.join(start_dir, 'Src', 'obj_setup.sh'), log_all=True, simple=True, log_output=True) - - if loose_ver < LooseVersion('4.1-b2'): - # MPI? 
- if self.toolchain.options.get('usempi', None): - self.cfg.update('configopts', '--enable-mpi') - - # BLAS and LAPACK - self.cfg.update('configopts', '--with-blas="%s"' % blas) - self.cfg.update('configopts', '--with-lapack="%s"' % lapack) - - # ScaLAPACK (and BLACS) - self.cfg.update('configopts', '--with-scalapack="%s"' % scalapack) - self.cfg.update('configopts', '--with-blacs="%s"' % blacs) - - # NetCDF-Fortran - if netcdff_loc: - self.cfg.update('configopts', '--with-netcdf=-lnetcdff') - - # Configure is run in obj_dir, configure script is in ../Src - super(EB_Siesta, self).configure_step(cmd_prefix='../Src/') - - if loose_ver > LooseVersion('4.0'): - regex_subs_Makefile = [ - (r'CFLAGS\)-c', r'CFLAGS) -c'), - ] - apply_regex_substitutions('Makefile', regex_subs_Makefile) - - else: # there's no configure on newer versions - - if self.toolchain.comp_family() in [toolchain.INTELCOMP]: - copy_file(os.path.join(obj_dir, 'intel.make'), arch_make) - elif self.toolchain.comp_family() in [toolchain.GCC]: - copy_file(os.path.join(obj_dir, 'gfortran.make'), arch_make) - else: - raise EasyBuildError("There is currently no support for compiler: %s", self.toolchain.comp_family()) - - if self.toolchain.options.get('usempi', None): - regex_subs.extend([ - (r"^(CC\s*=\s*).*$", r"\1%s" % os.environ['MPICC']), - (r"^(FC\s*=\s*).*$", r"\1%s" % os.environ['MPIF90']), - (r"^(FPPFLAGS\s*=.*)$", r"\1 -DMPI"), - ]) - regex_newlines.append((r"^(FPPFLAGS\s*=.*)$", r"\1\nMPI_INTERFACE = libmpi_f90.a\nMPI_INCLUDE = .")) - complibs = scalapack - else: - complibs = lapack - - regex_subs.extend([ - (r"^(LIBS\s*=).*$", r"\1 %s" % complibs), - # Needed for a couple of the utils - (r"^(FFLAGS\s*=\s*).*$", r"\1 -fPIC %s" % os.environ['FCFLAGS']), - ]) - regex_newlines.append((r"^(COMP_LIBS\s*=.*)$", r"\1\nWXML = libwxml.a")) - - if netcdff_loc: - regex_subs.extend([ - (r"^(LIBS\s*=.*)$", r"\1 $(NETCDF_LIBS)"), - (r"^(FPPFLAGS\s*=.*)$", r"\1 -DCDF $(NETCDF_INCLUDE)"), - ]) - netcdf_lib_and_inc = "NETCDF_LIBS = -lnetcdff\nNETCDF_INCLUDE = -I%s/include" % netcdff_loc - regex_newlines.append((r"^(COMP_LIBS\s*=.*)$", r"\1\n%s" % netcdf_lib_and_inc)) - - apply_regex_substitutions(arch_make, regex_subs) - - # individually apply substitutions that add lines - for regex_nl in regex_newlines: - apply_regex_substitutions(arch_make, [regex_nl]) - - run_cmd('make %s' % par, log_all=True, simple=True, log_output=True) - - # Put binary in temporary install dir - copy_file(os.path.join(obj_dir, 'siesta'), bindir) - - if self.cfg['with_utils']: - # Make the utils - change_dir(os.path.join(start_dir, 'Util')) - - if loose_ver >= LooseVersion('4'): - # clean_all.sh might be missing executable bit... 
- adjust_permissions('./clean_all.sh', stat.S_IXUSR, recursive=False, relative=True) - run_cmd('./clean_all.sh', log_all=True, simple=True, log_output=True) - - if loose_ver >= LooseVersion('4.1'): - regex_subs_TS = [ - (r"^default:.*$", r""), - (r"^EXE\s*=.*$", r""), - (r"^(include\s*..ARCH_MAKE.*)$", r"EXE=tshs2tshs\ndefault: $(EXE)\n\1"), - (r"^(INCFLAGS.*)$", r"\1 -I%s" % obj_dir), - ] - - makefile = os.path.join(start_dir, 'Util', 'TS', 'tshs2tshs', 'Makefile') - apply_regex_substitutions(makefile, regex_subs_TS) - - if loose_ver >= LooseVersion('4'): - # SUFFIX rules in wrong place - regex_subs_suffix = [ - (r'^(\.SUFFIXES:.*)$', r''), - (r'^(include\s*\$\(ARCH_MAKE\).*)$', r'\1\n.SUFFIXES:\n.SUFFIXES: .c .f .F .o .a .f90 .F90'), - ] - makefile = os.path.join(start_dir, 'Util', 'Sockets', 'Makefile') - apply_regex_substitutions(makefile, regex_subs_suffix) - makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine', 'SimpleTest', 'Src', 'Makefile') - apply_regex_substitutions(makefile, regex_subs_suffix) - - regex_subs_UtilLDFLAGS = [ - (r'(\$\(FC\)\s*-o\s)', r'$(FC) %s %s -o ' % (os.environ['FCFLAGS'], os.environ['LDFLAGS'])), - ] - makefile = os.path.join(start_dir, 'Util', 'Optimizer', 'Makefile') - apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS) - if loose_ver >= LooseVersion('4'): - makefile = os.path.join(start_dir, 'Util', 'JobList', 'Src', 'Makefile') - apply_regex_substitutions(makefile, regex_subs_UtilLDFLAGS) - - # remove clean at the end of default target - # And yes, they are re-introducing this bug. - is_ver40_to_401 = loose_ver >= LooseVersion('4.0') and loose_ver < LooseVersion('4.0.2') - if (is_ver40_to_401 or loose_ver == LooseVersion('4.1-b3')): - makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine', 'SimpleTest', 'Src', 'Makefile') - apply_regex_substitutions(makefile, [(r"simple_mpi_parallel clean", r"simple_mpi_parallel")]) - makefile = os.path.join(start_dir, 'Util', 'SiestaSubroutine', 'ProtoNEB', 'Src', 'Makefile') - apply_regex_substitutions(makefile, [(r"protoNEB clean", r"protoNEB")]) - - # build_all.sh might be missing executable bit... 
- adjust_permissions('./build_all.sh', stat.S_IXUSR, recursive=False, relative=True) - run_cmd('./build_all.sh', log_all=True, simple=True, log_output=True) - - # Now move all the built utils to the temp installdir - expected_utils = [ - 'CMLComp/ccViz', - 'Contrib/APostnikov/eig2bxsf', 'Contrib/APostnikov/fmpdos', - 'Contrib/APostnikov/md2axsf', 'Contrib/APostnikov/rho2xsf', - 'Contrib/APostnikov/vib2xsf', 'Contrib/APostnikov/xv2xsf', - 'COOP/fat', 'COOP/mprop', - 'Denchar/Src/denchar', - 'DensityMatrix/cdf2dm', 'DensityMatrix/dm2cdf', - 'Eig2DOS/Eig2DOS', - 'Gen-basis/gen-basis', 'Gen-basis/ioncat', - 'Gen-basis/ionplot.sh', - 'Grid/cdf2grid', 'Grid/cdf2xsf', 'Grid/cdf_laplacian', - 'Grid/g2c_ng', 'Grid/grid2cdf', 'Grid/grid2cube', - 'Grid/grid2val', 'Grid/grid_rotate', - 'Helpers/get_chem_labels', - 'HSX/hs2hsx', 'HSX/hsx2hs', - 'JobList/Src/countJobs', 'JobList/Src/getResults', - 'JobList/Src/horizontal', 'JobList/Src/runJobs', - 'Macroave/Src/macroave', - 'ON/lwf2cdf', - 'Optimizer/simplex', 'Optimizer/swarm', - 'pdosxml/pdosxml', - 'Projections/orbmol_proj', - 'SiestaSubroutine/FmixMD/Src/driver', - 'SiestaSubroutine/FmixMD/Src/para', - 'SiestaSubroutine/FmixMD/Src/simple', - 'STM/ol-stm/Src/stm', 'STM/simple-stm/plstm', - 'VCA/fractional', 'VCA/mixps', - 'Vibra/Src/fcbuild', 'Vibra/Src/vibra', - 'WFS/info_wfsx', - 'WFS/readwf', 'WFS/readwfx', 'WFS/wfs2wfsx', - 'WFS/wfsnc2wfsx', 'WFS/wfsx2wfs', - ] - - if loose_ver >= LooseVersion('3.2'): - expected_utils.extend([ - 'Bands/eigfat2plot', - ]) - - if loose_ver >= LooseVersion('4.0'): - expected_utils.extend([ - 'SiestaSubroutine/ProtoNEB/Src/protoNEB', - 'SiestaSubroutine/SimpleTest/Src/simple_pipes_parallel', - 'SiestaSubroutine/SimpleTest/Src/simple_pipes_serial', - 'SiestaSubroutine/SimpleTest/Src/simple_sockets_parallel', - 'SiestaSubroutine/SimpleTest/Src/simple_sockets_serial', - 'Sockets/f2fmaster', 'Sockets/f2fslave', - ]) - if self.toolchain.options.get('usempi', None): - expected_utils.extend([ - 'SiestaSubroutine/SimpleTest/Src/simple_mpi_parallel', - 'SiestaSubroutine/SimpleTest/Src/simple_mpi_serial', - ]) - - if loose_ver < LooseVersion('4.1'): - if loose_ver >= LooseVersion('4.0'): - expected_utils.extend([ - 'COOP/dm_creator', - 'TBTrans_rep/tbtrans', - ]) - else: - expected_utils.extend([ - 'TBTrans/tbtrans', - ]) - - if loose_ver < LooseVersion('4.0.2'): - expected_utils.extend([ - 'Bands/new.gnubands', - ]) - else: - expected_utils.extend([ - 'Bands/gnubands', - ]) - # Need to revisit this when 4.1 is officialy released. 
- # This is based on b1-b3 releases - if loose_ver < LooseVersion('4.1'): - expected_utils.extend([ - 'Contour/grid1d', 'Contour/grid2d', - 'Optical/optical', 'Optical/optical_input', - 'sies2arc/sies2arc', - ]) - - if loose_ver >= LooseVersion('4.1'): - expected_utils.extend([ - 'DensityMatrix/dmbs2dm', 'DensityMatrix/dmUnblock', - 'Grimme/fdf2grimme', - 'SpPivot/pvtsp', - 'TS/TBtrans/tbtrans', 'TS/tselecs.sh', - 'TS/ts2ts/ts2ts', 'TS/tshs2tshs/tshs2tshs', - ]) - - for util in expected_utils: - copy_file(os.path.join(start_dir, 'Util', util), bindir) - - if self.cfg['with_transiesta']: - # Build transiesta - change_dir(obj_dir) - - run_cmd('make clean', log_all=True, simple=True, log_output=True) - run_cmd('make %s transiesta' % par, log_all=True, simple=True, log_output=True) - - copy_file(os.path.join(obj_dir, 'transiesta'), bindir) - - def build_step(self): - """No build step for Siesta.""" - pass - - def install_step(self): - """Custom install procedure for Siesta: copy binaries.""" - bindir = os.path.join(self.installdir, 'bin') - copy_dir(os.path.join(self.cfg['start_dir'], 'bin'), bindir) - - def sanity_check_step(self): - """Custom sanity check for Siesta.""" - - bins = ['bin/siesta'] - - if self.cfg['with_transiesta']: - bins.append('bin/transiesta') - - if self.cfg['with_utils']: - bins.append('bin/denchar') - - custom_paths = { - 'files': bins, - 'dirs': [], - } - custom_commands = [] - if self.toolchain.options.get('usempi', None): - environment = '' - # To allow the test to run in the wrong context - if self.toolchain.mpi_family() == toolchain.INTELMPI: - environment = 'export I_MPI_FABRICS=shm && ' - # make sure Siesta was indeed built with support for running in parallel - # The "cd to builddir" is required to not contaminate the install dir with cruft from running siesta - mpi_test_cmd = environment - mpi_test_cmd += "cd %s && " % self.builddir - mpi_test_cmd += "echo 'SystemName test' | siesta 2>/dev/null | grep PARALLEL" - custom_commands.append(mpi_test_cmd) - - super(EB_Siesta, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands) diff --git a/Custom_EasyBlocks/tensorflow.py b/Custom_EasyBlocks/tensorflow.py deleted file mode 100644 index 822642cbcbb64134d6303719f8ff1dcb54d3df89..0000000000000000000000000000000000000000 --- a/Custom_EasyBlocks/tensorflow.py +++ /dev/null @@ -1,899 +0,0 @@ -## -# Copyright 2017-2020 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. 
-## -""" -EasyBuild support for building and installing TensorFlow, implemented as an easyblock - -@author: Kenneth Hoste (HPC-UGent) -@author: Ake Sandgren (Umea University) -@author: Damian Alvarez (Forschungzentrum Juelich GmbH) -""" -import glob -import os -import re -import stat -import tempfile -import json -from distutils.version import LooseVersion - -import easybuild.tools.environment as env -import easybuild.tools.toolchain as toolchain -from easybuild.easyblocks.generic.pythonpackage import PythonPackage, det_python_version -from easybuild.easyblocks.python import EXTS_FILTER_PYTHON_PACKAGES -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools import run -from easybuild.tools.build_log import EasyBuildError, print_warning -from easybuild.tools.config import build_option -from easybuild.tools.filetools import adjust_permissions, apply_regex_substitutions, copy_file, mkdir, resolve_path -from easybuild.tools.filetools import is_readable, read_file, which, write_file, remove_file -from easybuild.tools.modules import get_software_root, get_software_version, get_software_libdir -from easybuild.tools.run import run_cmd -from easybuild.tools.systemtools import X86_64, get_cpu_architecture, get_os_name, get_os_version -from easybuild.tools.py2vs3 import subprocess_popen_text - - -# Wrapper for Intel(MPI) compilers, where required environment variables -# are hardcoded to make sure they are present; -# this is required because Bazel resets the environment in which -# compiler commands are executed... -INTEL_COMPILER_WRAPPER = """#!/bin/bash - -export CPATH='%(cpath)s' - -# Only relevant for Intel compilers. -export INTEL_LICENSE_FILE='%(intel_license_file)s' - -# Only relevant for MPI compiler wrapper (mpiicc/mpicc etc), -# not for regular compiler. -export I_MPI_ROOT='%(intel_mpi_root)s' - -# Exclude location of this wrapper from $PATH to avoid other potential -# wrappers calling this wrapper. 
-export PATH=$(echo $PATH | tr ':' '\n' | grep -v "^%(wrapper_dir)s$" | tr '\n' ':')
-
-%(compiler_path)s "$@"
-"""
-
-
-def split_tf_libs_txt(valid_libs_txt):
-    """Split the VALID_LIBS entry from the TF file into single names"""
-    entries = valid_libs_txt.split(',')
-    # Remove double quotes and whitespace
-    result = [entry.strip().strip('"') for entry in entries]
-    # Remove potentially trailing empty element due to trailing comma in the txt
-    if not result[-1]:
-        result.pop()
-    return result
-
-
-def get_system_libs_from_tf(source_dir):
-    """Return the valid values for TF_SYSTEM_LIBS from the TensorFlow source directory"""
-    syslibs_path = os.path.join(source_dir, 'third_party', 'systemlibs', 'syslibs_configure.bzl')
-    result = []
-    if os.path.exists(syslibs_path):
-        txt = read_file(syslibs_path)
-        valid_libs_match = re.search(r'VALID_LIBS\s*=\s*\[(.*?)\]', txt, re.DOTALL)
-        if not valid_libs_match:
-            raise EasyBuildError('VALID_LIBS definition not found in %s', syslibs_path)
-        result = split_tf_libs_txt(valid_libs_match.group(1))
-    return result
-
-
-def get_system_libs_for_version(tf_version, as_valid_libs=False):
-    """
-    Determine valid values for $TF_SYSTEM_LIBS for the given TF version
-
-    If as_valid_libs=False (default) then returns 2 dictionaries:
-        1: Mapping of <EB name> to <TF name>
-        2: Mapping of <package name> to <TF name> (for python extensions)
-    else returns a string formatted like the VALID_LIBS variable in third_party/systemlibs/syslibs_configure.bzl
-    Those can be used to check/diff against third_party/systemlibs/syslibs_configure.bzl by running:
-    python -c 'from easybuild.easyblocks.tensorflow import get_system_libs_for_version; \
-        print(get_system_libs_for_version("2.1.0", as_valid_libs=True))'
-    """
-    tf_version = LooseVersion(tf_version)
-
-    def is_version_ok(version_range):
-        """Return True if the TF version to be installed matches the version_range"""
-        min_version, max_version = version_range.split(':')
-        result = True
-        if min_version and tf_version < LooseVersion(min_version):
-            result = False
-        if max_version and tf_version >= LooseVersion(max_version):
-            result = False
-        return result
-
-    # For these lists check third_party/systemlibs/syslibs_configure.bzl --> VALID_LIBS
-    # Also verify third_party/systemlibs/<name>.BUILD or third_party/systemlibs/<name>/BUILD.system
-    # if it does something "strange" (e.g. 
link hardcoded headers) - - # Software which is added as a dependency in the EC - available_system_libs = { - # Format: (<EB name>, <version range>): <TF name> - # <version range> is '<min version>:<exclusive max version>' - ('cURL', '2.0.0:'): 'curl', - ('double-conversion', '2.0.0:'): 'double_conversion', - ('flatbuffers', '2.0.0:'): 'flatbuffers', - ('giflib', '2.0.0:2.1.0'): 'gif_archive', - ('giflib', '2.1.0:'): 'gif', - ('hwloc', '2.0.0:'): 'hwloc', - ('ICU', '2.0.0:'): 'icu', - ('JsonCpp', '2.0.0:'): 'jsoncpp_git', - ('libjpeg-turbo', '2.0.0:2.2.0'): 'jpeg', - ('libjpeg-turbo', '2.2.0:'): 'libjpeg_turbo', - ('libpng', '2.0.0:2.1.0'): 'png_archive', - ('libpng', '2.1.0:'): 'png', - ('LMDB', '2.0.0:'): 'lmdb', - ('NASM', '2.0.0:'): 'nasm', - ('nsync', '2.0.0:'): 'nsync', - ('PCRE', '2.0.0:'): 'pcre', - ('protobuf-python', '2.0.0:'): 'com_google_protobuf', - ('pybind11', '2.2.0:'): 'pybind11', - ('snappy', '2.0.0:'): 'snappy', - ('SQLite', '2.0.0:'): 'org_sqlite', - ('SWIG', '2.0.0:'): 'swig', - ('zlib', '2.0.0:2.2.0'): 'zlib_archive', - ('zlib', '2.2.0:'): 'zlib', - } - # Software recognized by TF but which is always disabled (usually because no EC is known) - # Format: <TF name>: <version range> - unused_system_libs = { - 'boringssl': '2.0.0:', - 'com_github_googleapis_googleapis': '2.0.0:', - 'com_github_googlecloudplatform_google_cloud_cpp': '2.0.0:', # Not used due to $TF_NEED_GCP=0 - 'com_github_grpc_grpc': '2.2.0:', - 'com_googlesource_code_re2': '2.0.0:', - 'grpc': '2.0.0:2.2.0', - } - # Python packages installed as extensions or in the Python module - # Will be checked for availabilitly - # Format: (<package name>, <version range>): <TF name> - python_system_libs = { - ('absl', '2.0.0:'): 'absl_py', - ('astor', '2.0.0:'): 'astor_archive', - ('astunparse', '2.2.0:'): 'astunparse_archive', - ('cython', '2.0.0:'): 'cython', # Part of Python EC - ('enum', '2.0.0:'): 'enum34_archive', # Part of Python3 - ('functools', '2.0.0:'): 'functools32_archive', # Part of Python3 - ('gast', '2.0.0:'): 'gast_archive', - ('keras_applications', '2.0.0:2.2.0'): 'keras_applications_archive', - ('opt_einsum', '2.0.0:'): 'opt_einsum_archive', - ('pasta', '2.0.0:'): 'pasta', - ('six', '2.0.0:'): 'six_archive', # Part of Python EC - ('termcolor', '2.0.0:'): 'termcolor_archive', - ('wrapt', '2.0.0:'): 'wrapt', - } - dependency_mapping = dict((dep_name, tf_name) - for (dep_name, version_range), tf_name in available_system_libs.items() - if is_version_ok(version_range)) - python_mapping = dict((pkg_name, tf_name) - for (pkg_name, version_range), tf_name in python_system_libs.items() - if is_version_ok(version_range)) - - if as_valid_libs: - tf_names = [tf_name for tf_name, version_range in unused_system_libs.items() - if is_version_ok(version_range)] - tf_names.extend(dependency_mapping.values()) - tf_names.extend(python_mapping.values()) - result = '\n'.join([' "%s",' % name for name in sorted(tf_names)]) - else: - result = dependency_mapping, python_mapping - return result - - -class EB_TensorFlow(PythonPackage): - """Support for building/installing TensorFlow.""" - - @staticmethod - def extra_options(): - # We only want to install mkl-dnn by default on x86_64 systems - with_mkl_dnn_default = get_cpu_architecture() == X86_64 - extra_vars = { - # see https://developer.nvidia.com/cuda-gpus - 'cuda_compute_capabilities': [[], "List of CUDA compute capabilities to build with", CUSTOM], - 'path_filter': [[], "List of patterns to be filtered out in paths in $CPATH and $LIBRARY_PATH", CUSTOM], - 
'with_jemalloc': [None, "Make TensorFlow use jemalloc (usually enabled by default)", CUSTOM], - 'with_mkl_dnn': [with_mkl_dnn_default, "Make TensorFlow use Intel MKL-DNN", CUSTOM], - 'with_xla': [None, "Enable XLA JIT compiler for possible runtime optimization of models", CUSTOM], - 'test_script': [None, "Script to test TensorFlow installation with", CUSTOM], - } - - return PythonPackage.extra_options(extra_vars) - - def __init__(self, *args, **kwargs): - """Initialize TensorFlow easyblock.""" - super(EB_TensorFlow, self).__init__(*args, **kwargs) - - self.cfg['exts_defaultclass'] = 'PythonPackage' - - self.cfg['exts_default_options'] = { - 'download_dep_fail': True, - 'use_pip': True, - } - self.cfg['exts_filter'] = EXTS_FILTER_PYTHON_PACKAGES - self.system_libs_info = None - - self.test_script = None - - # locate test script (if specified) - if self.cfg['test_script']: - # try to locate test script via obtain_file (just like sources & patches files) - self.test_script = self.obtain_file(self.cfg['test_script']) - if self.test_script and os.path.exists(self.test_script): - self.log.info("Test script found: %s", self.test_script) - else: - raise EasyBuildError("Specified test script %s not found!", self.cfg['test_script']) - - def python_pkg_exists(self, name): - """Check if the given python package exists/can be imported""" - cmd = [self.python_cmd, '-c', 'import %s' % name] - out, ec = run_cmd(cmd, log_ok=False) - self.log.debug('Existence check for %s returned %s with output: %s', name, ec, out) - return ec == 0 - - def get_installed_python_packages(self): - """Return list of Python package names that are installed - - Note that the names are reported by pip and might be different to the name that needs to be used to import it - """ - # Check installed python packages but only check stdout, not stderr which might contain user facing warnings - cmd_list = [self.python_cmd, '-m', 'pip', 'list', '--isolated', '--disable-pip-version-check', - '--format', 'json'] - full_cmd = ' '.join(cmd_list) - self.log.info("Running command '%s'" % full_cmd) - proc = subprocess_popen_text(cmd_list, env=os.environ) - (stdout, stderr) = proc.communicate() - ec = proc.returncode - self.log.info("Command '%s' returned with %s: stdout: %s; stderr: %s" % (full_cmd, ec, stdout, stderr)) - if ec: - raise EasyBuildError('Failed to determine installed python packages: %s', stderr) - - return [pkg['name'] for pkg in json.loads(stdout.strip())] - - def handle_jemalloc(self): - """Figure out whether jemalloc support should be enabled or not.""" - if self.cfg['with_jemalloc'] is None: - if LooseVersion(self.version) > LooseVersion('1.6'): - # jemalloc bundled with recent versions of TensorFlow does not work on RHEL 6 or derivatives, - # so disable it automatically if with_jemalloc was left unspecified - os_name = get_os_name().replace(' ', '') - rh_based_os = any(os_name.startswith(x) for x in ['centos', 'redhat', 'rhel', 'sl']) - if rh_based_os and get_os_version().startswith('6.'): - self.log.info("Disabling jemalloc since bundled jemalloc does not work on RHEL 6 and derivatives") - self.cfg['with_jemalloc'] = False - - # if the above doesn't disable jemalloc support, then enable it by default - if self.cfg['with_jemalloc'] is None: - self.log.info("Enabling jemalloc support by default, since it was left unspecified") - self.cfg['with_jemalloc'] = True - - else: - # if with_jemalloc was specified, stick to that - self.log.info("with_jemalloc was specified as %s, so sticking to it", self.cfg['with_jemalloc']) - - def 
write_wrapper(self, wrapper_dir, compiler, i_mpi_root): - """Helper function to write a compiler wrapper.""" - wrapper_txt = INTEL_COMPILER_WRAPPER % { - 'compiler_path': which(compiler), - 'intel_mpi_root': i_mpi_root, - 'cpath': os.getenv('CPATH'), - 'intel_license_file': os.getenv('INTEL_LICENSE_FILE', os.getenv('LM_LICENSE_FILE')), - 'wrapper_dir': wrapper_dir, - } - wrapper = os.path.join(wrapper_dir, compiler) - write_file(wrapper, wrapper_txt) - if self.dry_run: - self.dry_run_msg("Wrapper for '%s' was put in place: %s", compiler, wrapper) - else: - adjust_permissions(wrapper, stat.S_IXUSR) - self.log.info("Using wrapper script for '%s': %s", compiler, which(compiler)) - - def verify_system_libs_info(self): - """Verifies that the stored info about $TF_SYSTEM_LIBS is complete""" - available_libs_src = set(get_system_libs_from_tf(self.start_dir)) - available_libs_eb = set(split_tf_libs_txt(get_system_libs_for_version(self.version, as_valid_libs=True))) - # If available_libs_eb is empty it is not an error e.g. it is not worth trying to make all old ECs work - # So we just log it so it can be verified manually if required - if not available_libs_eb: - self.log.warning('TensorFlow EasyBlock does not have any information for $TF_SYSTEM_LIBS stored. ' + - 'This means most dependencies will be downloaded at build time by TensorFlow.\n' + - 'Available $TF_SYSTEM_LIBS according to the TensorFlow sources: %s', - sorted(available_libs_src)) - return - # Those 2 sets should be equal. We determine the differences here to report better errors - missing_libs = available_libs_src - available_libs_eb - unknown_libs = available_libs_eb - available_libs_src - if missing_libs or unknown_libs: - if not available_libs_src: - msg = 'Failed to determine available $TF_SYSTEM_LIBS from the source' - else: - msg = 'Values for $TF_SYSTEM_LIBS in the TensorFlow EasyBlock are incomplete.\n' - if missing_libs: - # Libs available according to TF sources but not listed in this EasyBlock - msg += 'Missing entries for $TF_SYSTEM_LIBS: %s\n' % missing_libs - if unknown_libs: - # Libs listed in this EasyBlock but not present in the TF sources -> Removed? - msg += 'Unrecognized entries for $TF_SYSTEM_LIBS: %s\n' % unknown_libs - msg += 'The EasyBlock needs to be updated to fully work with TensorFlow version %s' % self.version - if build_option('strict') == run.ERROR: - raise EasyBuildError(msg) - else: - print_warning(msg) - - def get_system_libs(self): - """ - Get list of dependencies for $TF_SYSTEM_LIBS - - Returns a tuple of lists: $TF_SYSTEM_LIBS names, include paths, library paths - """ - dependency_mapping, python_mapping = get_system_libs_for_version(self.version) - - system_libs = [] - cpaths = [] - libpaths = [] - ignored_system_deps = [] - - # Check direct dependencies - dep_names = set(dep['name'] for dep in self.cfg.dependencies()) - for dep_name, tf_name in sorted(dependency_mapping.items(), key=lambda i: i[0].lower()): - if dep_name in dep_names: - system_libs.append(tf_name) - # When using cURL (which uses the system OpenSSL), we also need to use "boringssl" - # which essentially resolves to using OpenSSL as the API and library names are compatible - if dep_name == 'cURL': - system_libs.append('boringssl') - # For protobuf we need protobuf and protobuf-python where the latter depends on the former - # For includes etc. 
we need to get the values from protobuf - if dep_name == 'protobuf-python': - dep_name = 'protobuf' - sw_root = get_software_root(dep_name) - # Dependency might be filtered via --filter-deps. In that case assume globally installed version - if not sw_root: - continue - incpath = os.path.join(sw_root, 'include') - if os.path.exists(incpath): - cpaths.append(incpath) - if dep_name == 'JsonCpp' and LooseVersion(self.version) < LooseVersion('2.3'): - # Need to add the install prefix or patch the sources: - # https://github.com/tensorflow/tensorflow/issues/42303 - cpaths.append(sw_root) - if dep_name == 'protobuf': - # Need to set INCLUDEDIR as TF wants to symlink headers from there: - # https://github.com/tensorflow/tensorflow/issues/37835 - env.setvar('INCLUDEDIR', incpath) - libpath = get_software_libdir(dep_name) - if libpath: - libpaths.append(os.path.join(sw_root, libpath)) - else: - ignored_system_deps.append('%s (Dependency %s)' % (tf_name, dep_name)) - - for pkg_name, tf_name in sorted(python_mapping.items(), key=lambda i: i[0].lower()): - if self.python_pkg_exists(pkg_name): - system_libs.append(tf_name) - else: - ignored_system_deps.append('%s (Python package %s)' % (tf_name, pkg_name)) - - if ignored_system_deps: - self.log.warning('For the following $TF_SYSTEM_LIBS dependencies TensorFlow will download a copy ' + - 'because an EB dependency was not found: \n%s\n' + - 'EC Dependencies: %s\n' + - 'Installed Python packages: %s\n', - ', '.join(ignored_system_deps), - ', '.join(dep_names), - ', '.join(self.get_installed_python_packages())) - else: - self.log.info("All known TensorFlow $TF_SYSTEM_LIBS dependencies resolved via EasyBuild!") - - return system_libs, cpaths, libpaths - - def setup_build_dirs(self): - """Setup temporary build directories""" - # Tensorflow/Bazel needs a couple of directories where it stores build cache and artefacts - tmpdir = tempfile.mkdtemp(suffix='-bazel-tf', dir=self.builddir) - self.output_root_dir = os.path.join(tmpdir, 'output_root') - self.output_base_dir = os.path.join(tmpdir, 'output_base') - self.output_user_root_dir = os.path.join(tmpdir, 'output_user_root') - self.wrapper_dir = os.path.join(tmpdir, 'wrapper_bin') - # This (likely) needs to be a subdir of output_base - self.install_base_dir = os.path.join(self.output_base_dir, 'inst_base') - - def configure_step(self): - """Custom configuration procedure for TensorFlow.""" - - binutils_root = get_software_root('binutils') - if not binutils_root: - raise EasyBuildError("Failed to determine installation prefix for binutils") - self.binutils_bin_path = os.path.join(binutils_root, 'bin') - - # filter out paths from CPATH and LIBRARY_PATH. This is needed since bazel will pull some dependencies that - # might conflict with dependencies on the system and/or installed with EB. For example: protobuf - path_filter = self.cfg['path_filter'] - if path_filter: - self.log.info("Filtering $CPATH and $LIBRARY_PATH with path filter %s", path_filter) - for var in ['CPATH', 'LIBRARY_PATH']: - path = os.getenv(var).split(os.pathsep) - self.log.info("$%s old value was %s" % (var, path)) - filtered_path = os.pathsep.join([p for fil in path_filter for p in path if fil not in p]) - env.setvar(var, filtered_path) - - self.setup_build_dirs() - - use_wrapper = False - if self.toolchain.comp_family() == toolchain.INTELCOMP: - # put wrappers for Intel C/C++ compilers in place (required to make sure license server is found) - # cfr. 
https://github.com/bazelbuild/bazel/issues/663 - for compiler in ('icc', 'icpc'): - self.write_wrapper(self.wrapper_dir, compiler, 'NOT-USED-WITH-ICC') - use_wrapper = True - - use_mpi = self.toolchain.options.get('usempi', False) - mpi_home = '' - if use_mpi: - impi_root = get_software_root('impi') - if impi_root: - # put wrappers for Intel MPI compiler wrappers in place - # (required to make sure license server and I_MPI_ROOT are found) - for compiler in (os.getenv('MPICC'), os.getenv('MPICXX')): - self.write_wrapper(self.wrapper_dir, compiler, os.getenv('I_MPI_ROOT')) - use_wrapper = True - # set correct value for MPI_HOME - mpi_home = os.path.join(impi_root, 'intel64') - else: - self.log.debug("MPI module name: %s", self.toolchain.MPI_MODULE_NAME[0]) - mpi_home = get_software_root(self.toolchain.MPI_MODULE_NAME[0]) - - self.log.debug("Derived value for MPI_HOME: %s", mpi_home) - - if use_wrapper: - env.setvar('PATH', os.pathsep.join([self.wrapper_dir, os.getenv('PATH')])) - - self.prepare_python() - self.handle_jemalloc() - - self.verify_system_libs_info() - self.system_libs_info = self.get_system_libs() - - cuda_root = get_software_root('CUDA') - cudnn_root = get_software_root('cuDNN') - opencl_root = get_software_root('OpenCL') - tensorrt_root = get_software_root('TensorRT') - nccl_root = get_software_root('NCCL') - - config_env_vars = { - 'CC_OPT_FLAGS': os.getenv('CXXFLAGS'), - 'MPI_HOME': mpi_home, - 'PYTHON_BIN_PATH': self.python_cmd, - 'PYTHON_LIB_PATH': os.path.join(self.installdir, self.pylibdir), - 'TF_CUDA_CLANG': '0', - 'TF_ENABLE_XLA': ('0', '1')[bool(self.cfg['with_xla'])], # XLA JIT support - 'TF_NEED_CUDA': ('0', '1')[bool(cuda_root)], - 'TF_NEED_GCP': '0', # Google Cloud Platform - 'TF_NEED_GDR': '0', - 'TF_NEED_HDFS': '0', # Hadoop File System - 'TF_NEED_JEMALLOC': ('0', '1')[self.cfg['with_jemalloc']], - 'TF_NEED_MPI': ('0', '1')[bool(use_mpi)], - 'TF_NEED_OPENCL': ('0', '1')[bool(opencl_root)], - 'TF_NEED_OPENCL_SYCL': '0', - 'TF_NEED_ROCM': '0', - 'TF_NEED_S3': '0', # Amazon S3 File System - 'TF_NEED_TENSORRT': '0', - 'TF_NEED_VERBS': '0', - 'TF_NEED_AWS': '0', # Amazon AWS Platform - 'TF_NEED_KAFKA': '0', # Amazon Kafka Platform - 'TF_SET_ANDROID_WORKSPACE': '0', - 'TF_DOWNLOAD_CLANG': '0', # Still experimental in TF 2.1.0 - 'TF_SYSTEM_LIBS': ','.join(self.system_libs_info[0]), - } - if cuda_root: - cuda_version = get_software_version('CUDA') - cuda_maj_min_ver = '.'.join(cuda_version.split('.')[:2]) - - # $GCC_HOST_COMPILER_PATH should be set to path of the actual compiler (not the MPI compiler wrapper) - if use_mpi: - compiler_path = which(os.getenv('CC_SEQ')) - else: - compiler_path = which(os.getenv('CC')) - - # list of CUDA compute capabilities to use can be specifed in two ways (where (2) overrules (1)): - # (1) in the easyconfig file, via the custom cuda_compute_capabilities; - # (2) in the EasyBuild configuration, via --cuda-compute-capabilities configuration option; - ec_cuda_cc = self.cfg['cuda_compute_capabilities'] - cfg_cuda_cc = build_option('cuda_compute_capabilities') - cuda_cc = cfg_cuda_cc or ec_cuda_cc or [] - - if cfg_cuda_cc and ec_cuda_cc: - warning_msg = "cuda_compute_capabilities specified in easyconfig (%s) are overruled by " % ec_cuda_cc - warning_msg += "--cuda-compute-capabilities configuration option (%s)" % cfg_cuda_cc - print_warning(warning_msg) - elif not cuda_cc: - warning_msg = "No CUDA compute capabilities specified, so using TensorFlow default " - warning_msg += "(which may not be optimal for your system).\nYou should use " - 
warning_msg += "the --cuda-compute-capabilities configuration option or the cuda_compute_capabilities " - warning_msg += "easyconfig parameter to specify a list of CUDA compute capabilities to compile with." - print_warning(warning_msg) - - # TensorFlow 1.12.1 requires compute capability >= 3.5 - # see https://github.com/tensorflow/tensorflow/pull/25767 - if LooseVersion(self.version) >= LooseVersion('1.12.1'): - faulty_comp_caps = [x for x in cuda_cc if LooseVersion(x) < LooseVersion('3.5')] - if faulty_comp_caps: - error_msg = "TensorFlow >= 1.12.1 requires CUDA compute capabilities >= 3.5, " - error_msg += "found one or more older ones: %s" - raise EasyBuildError(error_msg, ', '.join(faulty_comp_caps)) - - if cuda_cc: - self.log.info("Compiling with specified list of CUDA compute capabilities: %s", ', '.join(cuda_cc)) - - config_env_vars.update({ - 'CUDA_TOOLKIT_PATH': cuda_root, - 'GCC_HOST_COMPILER_PATH': compiler_path, - # This is the binutils bin folder: https://github.com/tensorflow/tensorflow/issues/39263 - 'GCC_HOST_COMPILER_PREFIX': self.binutils_bin_path, - 'TF_CUDA_COMPUTE_CAPABILITIES': ','.join(cuda_cc), - 'TF_CUDA_VERSION': cuda_maj_min_ver, - }) - - # for recent TensorFlow versions, $TF_CUDA_PATHS and $TF_CUBLAS_VERSION must also be set - if LooseVersion(self.version) >= LooseVersion('1.14'): - - # figure out correct major/minor version for CUBLAS from cublas_api.h - cublas_api_header_glob_pattern = os.path.join(cuda_root, 'targets', '*', 'include', 'cublas_api.h') - matches = glob.glob(cublas_api_header_glob_pattern) - if len(matches) == 1: - cublas_api_header_path = matches[0] - cublas_api_header_txt = read_file(cublas_api_header_path) - else: - raise EasyBuildError("Failed to isolate path to cublas_api.h: %s", matches) - - cublas_ver_parts = [] - for key in ['CUBLAS_VER_MAJOR', 'CUBLAS_VER_MINOR', 'CUBLAS_VER_PATCH']: - regex = re.compile("^#define %s ([0-9]+)" % key, re.M) - res = regex.search(cublas_api_header_txt) - if res: - cublas_ver_parts.append(res.group(1)) - else: - raise EasyBuildError("Failed to find pattern '%s' in %s", regex.pattern, cublas_api_header_path) - - config_env_vars.update({ - 'TF_CUDA_PATHS': cuda_root, - 'TF_CUBLAS_VERSION': '.'.join(cublas_ver_parts), - }) - - if cudnn_root: - cudnn_version = get_software_version('cuDNN') - cudnn_maj_min_patch_ver = '.'.join(cudnn_version.split('.')[:3]) - - config_env_vars.update({ - 'CUDNN_INSTALL_PATH': cudnn_root, - 'TF_CUDNN_VERSION': cudnn_maj_min_patch_ver, - }) - else: - raise EasyBuildError("TensorFlow has a strict dependency on cuDNN if CUDA is enabled") - if nccl_root: - nccl_version = get_software_version('NCCL') - # Ignore the PKG_REVISION identifier if it exists (i.e., report 2.4.6 for 2.4.6-1 or 2.4.6-2) - nccl_version = nccl_version.split('-')[0] - config_env_vars.update({ - 'NCCL_INSTALL_PATH': nccl_root, - }) - else: - nccl_version = '1.3' # Use simple downloadable version - config_env_vars.update({ - 'TF_NCCL_VERSION': nccl_version, - }) - if tensorrt_root: - tensorrt_version = get_software_version('TensorRT') - config_env_vars.update({ - 'TF_NEED_TENSORRT': '1', - 'TENSORRT_INSTALL_PATH': tensorrt_root, - 'TF_TENSORRT_VERSION': tensorrt_version, - }) - - for (key, val) in sorted(config_env_vars.items()): - env.setvar(key, val) - - # Does no longer apply (and might not be required at all) since 1.12.0 - if LooseVersion(self.version) < LooseVersion('1.12.0'): - # patch configure.py (called by configure script) to avoid that Bazel abuses $HOME/.cache/bazel - regex_subs = 
[(r"(run_shell\(\['bazel')", - r"\1, '--output_base=%s', '--install_base=%s'" % (self.output_base_dir, - self.install_base_dir))] - apply_regex_substitutions('configure.py', regex_subs) - - # Tell Bazel to not use $HOME/.cache/bazel at all - # See https://docs.bazel.build/versions/master/output_directories.html - env.setvar('TEST_TMPDIR', self.output_root_dir) - cmd = self.cfg['preconfigopts'] + './configure ' + self.cfg['configopts'] - run_cmd(cmd, log_all=True, simple=True) - - def patch_crosstool_files(self): - """Patches the CROSSTOOL files to include EasyBuild provided compiler paths""" - inc_paths, lib_paths = [], [] - - gcc_root = get_software_root('GCCcore') or get_software_root('GCC') - if gcc_root: - gcc_lib64 = os.path.join(gcc_root, 'lib64') - lib_paths.append(gcc_lib64) - - gcc_ver = get_software_version('GCCcore') or get_software_version('GCC') - - # figure out location of GCC include files - res = glob.glob(os.path.join(gcc_root, 'lib', 'gcc', '*', gcc_ver, 'include')) - if res and len(res) == 1: - gcc_lib_inc = res[0] - inc_paths.append(gcc_lib_inc) - else: - raise EasyBuildError("Failed to pinpoint location of GCC include files: %s", res) - - # make sure include-fixed directory is where we expect it to be - gcc_lib_inc_fixed = os.path.join(os.path.dirname(gcc_lib_inc), 'include-fixed') - if os.path.exists(gcc_lib_inc_fixed): - inc_paths.append(gcc_lib_inc_fixed) - else: - self.log.info("Derived directory %s does not exist, so discarding it", gcc_lib_inc_fixed) - - # also check on location of include/c++/<gcc version> directory - gcc_cplusplus_inc = os.path.join(gcc_root, 'include', 'c++', gcc_ver) - if os.path.exists(gcc_cplusplus_inc): - inc_paths.append(gcc_cplusplus_inc) - else: - raise EasyBuildError("Derived directory %s does not exist", gcc_cplusplus_inc) - else: - raise EasyBuildError("Failed to determine installation prefix for GCC") - - cuda_root = get_software_root('CUDA') - if cuda_root: - inc_paths.append(os.path.join(cuda_root, 'include')) - lib_paths.append(os.path.join(cuda_root, 'lib64')) - - # fix hardcoded locations of compilers & tools - cxx_inc_dirs = ['cxx_builtin_include_directory: "%s"' % resolve_path(p) for p in inc_paths] - cxx_inc_dirs += ['cxx_builtin_include_directory: "%s"' % p for p in inc_paths] - regex_subs = [ - (r'-B/usr/bin/', '-B%s %s' % (self.binutils_bin_path, ' '.join('-L%s/' % p for p in lib_paths))), - (r'(cxx_builtin_include_directory:).*', ''), - (r'^toolchain {', 'toolchain {\n' + '\n'.join(cxx_inc_dirs)), - ] - for tool in ['ar', 'cpp', 'dwp', 'gcc', 'gcov', 'ld', 'nm', 'objcopy', 'objdump', 'strip']: - path = which(tool) - if path: - regex_subs.append((os.path.join('/usr', 'bin', tool), path)) - else: - raise EasyBuildError("Failed to determine path to '%s'", tool) - - # -fPIE/-pie and -fPIC are not compatible, so patch out hardcoded occurences of -fPIE/-pie if -fPIC is used - if self.toolchain.options.get('pic', None): - regex_subs.extend([('-fPIE', '-fPIC'), ('"-pie"', '"-fPIC"')]) - - # patch all CROSSTOOL* scripts to fix hardcoding of locations of binutils/GCC binaries - for path, dirnames, filenames in os.walk(os.getcwd()): - for filename in filenames: - if filename.startswith('CROSSTOOL'): - full_path = os.path.join(path, filename) - self.log.info("Patching %s", full_path) - apply_regex_substitutions(full_path, regex_subs) - - def build_step(self): - """Custom build procedure for TensorFlow.""" - - # pre-create target installation directory - mkdir(os.path.join(self.installdir, self.pylibdir), parents=True) - - # This 
seems to be no longer required since at least 2.0, likely also for older versions - if LooseVersion(self.version) < LooseVersion('2.0'): - self.patch_crosstool_files() - - # compose "bazel build" command with all its options... - cmd = [ - self.cfg['prebuildopts'], - 'bazel', - '--output_base=%s' % self.output_base_dir, - '--install_base=%s' % self.install_base_dir, - '--output_user_root=%s' % self.output_user_root_dir, - 'build', - ] - - # build with optimization enabled - # cfr. https://docs.bazel.build/versions/master/user-manual.html#flag--compilation_mode - cmd.append('--compilation_mode=opt') - - # select 'opt' config section (this is *not* the same as --compilation_mode=opt!) - # https://docs.bazel.build/versions/master/user-manual.html#flag--config - cmd.append('--config=opt') - - # make Bazel print full command line + make it verbose on failures - # https://docs.bazel.build/versions/master/user-manual.html#flag--subcommands - # https://docs.bazel.build/versions/master/user-manual.html#flag--verbose_failures - cmd.extend(['--subcommands', '--verbose_failures']) - - # Disable support of AWS platform via config switch introduced in 1.12.1 - if LooseVersion(self.version) >= LooseVersion('1.12.1'): - cmd.append('--config=noaws') - - # Bazel seems to not be able to handle a large amount of parallel jobs, e.g. 176 on some Power machines, - # and will hang forever building the TensorFlow package. - # So limit to something high but still reasonable while allowing ECs to overwrite it - parallel = self.cfg['parallel'] - if self.cfg['maxparallel'] is None: - parallel = min(parallel, 64) - cmd.append('--jobs=%s' % parallel) - - if self.toolchain.options.get('pic', None): - cmd.append('--copt="-fPIC"') - - # include install location of Python packages in $PYTHONPATH, - # and specify that value of $PYTHONPATH should be passed down into Bazel build environment; - # this is required to make sure that Python packages included as extensions are found at build time; - # see also https://github.com/tensorflow/tensorflow/issues/22395 - pythonpath = os.getenv('PYTHONPATH', '') - env.setvar('PYTHONPATH', os.pathsep.join([os.path.join(self.installdir, self.pylibdir), pythonpath])) - - # Make TF find our modules. LD_LIBRARY_PATH gets automatically added by configure.py - cpaths, libpaths = self.system_libs_info[1:] - if cpaths: - cmd.append("--action_env=CPATH='%s'" % ':'.join(cpaths)) - if libpaths: - cmd.append("--action_env=LIBRARY_PATH='%s'" % ':'.join(libpaths)) - cmd.append('--action_env=PYTHONPATH') - # Also export $EBPYTHONPREFIXES to handle the multi-deps python setup - # See https://github.com/easybuilders/easybuild-easyblocks/pull/1664 - if 'EBPYTHONPREFIXES' in os.environ: - cmd.append('--action_env=EBPYTHONPREFIXES') - - # Ignore user environment for Python - cmd.append('--action_env=PYTHONNOUSERSITE=1') - - # use same configuration for both host and target programs, which can speed up the build - # only done when optarch is enabled, since this implicitely assumes that host and target platform are the same - # see https://docs.bazel.build/versions/master/guide.html#configurations - if self.toolchain.options.get('optarch'): - cmd.append('--distinct_host_configuration=false') - - cmd.append(self.cfg['buildopts']) - - # TF 2 (final) sets this in configure - if LooseVersion(self.version) < LooseVersion('2.0'): - if get_software_root('CUDA'): - cmd.append('--config=cuda') - - # if mkl-dnn is listed as a dependency it is used. 
Otherwise downloaded if with_mkl_dnn is true - mkl_root = get_software_root('mkl-dnn') - if mkl_root: - cmd.extend(['--config=mkl']) - cmd.insert(0, "export TF_MKL_DOWNLOAD=0 &&") - cmd.insert(0, "export TF_MKL_ROOT=%s &&" % mkl_root) - elif self.cfg['with_mkl_dnn']: - # this makes TensorFlow use mkl-dnn (cfr. https://github.com/01org/mkl-dnn) - cmd.extend(['--config=mkl']) - cmd.insert(0, "export TF_MKL_DOWNLOAD=1 && ") - - # specify target of the build command as last argument - cmd.append('//tensorflow/tools/pip_package:build_pip_package') - - run_cmd(' '.join(cmd), log_all=True, simple=True, log_ok=True) - - # run generated 'build_pip_package' script to build the .whl - cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir - run_cmd(cmd, log_all=True, simple=True, log_ok=True) - - def test_step(self): - """No (reliable) custom test procedure for TensorFlow.""" - pass - - def install_step(self): - """Custom install procedure for TensorFlow.""" - # find .whl file that was built, and install it using 'pip install' - if ("-rc" in self.version): - whl_version = self.version.replace("-rc", "rc") - else: - whl_version = self.version - - whl_paths = glob.glob(os.path.join(self.builddir, 'tensorflow-%s-*.whl' % whl_version)) - if not whl_paths: - whl_paths = glob.glob(os.path.join(self.builddir, 'tensorflow-*.whl')) - if len(whl_paths) == 1: - # --ignore-installed is required to ensure *this* wheel is installed - cmd = "pip install --ignore-installed --prefix=%s %s" % (self.installdir, whl_paths[0]) - - # if extensions are listed, assume they will provide all required dependencies, - # so use --no-deps to prevent pip from downloading & installing them - if self.cfg['exts_list']: - cmd += ' --no-deps' - - run_cmd(cmd, log_all=True, simple=True, log_ok=True) - else: - raise EasyBuildError("Failed to isolate built .whl in %s: %s", whl_paths, self.builddir) - - # Fix for https://github.com/tensorflow/tensorflow/issues/6341 on Python < 3.3 - # If the site-packages/google/__init__.py file is missing, make it an empty file. - # This fixes the "No module named google.protobuf" error that sometimes shows up during sanity_check - # For Python >= 3.3 the logic is reversed: The __init__.py must not exist. - # See e.g. 
http://python-notes.curiousefficiency.org/en/latest/python_concepts/import_traps.html
-        google_protobuf_dir = os.path.join(self.installdir, self.pylibdir, 'google', 'protobuf')
-        google_init_file = os.path.join(self.installdir, self.pylibdir, 'google', '__init__.py')
-        if LooseVersion(det_python_version(self.python_cmd)) < LooseVersion('3.3'):
-            if os.path.isdir(google_protobuf_dir) and not is_readable(google_init_file):
-                self.log.debug("Creating (empty) missing %s", google_init_file)
-                write_file(google_init_file, '')
-        else:
-            if os.path.exists(google_init_file):
-                self.log.debug("Removing %s for Python >= 3.3", google_init_file)
-                remove_file(google_init_file)
-
-        # Fix cuda header paths
-        # This is needed for building custom TensorFlow ops
-        if LooseVersion(self.version) < LooseVersion('1.14'):
-            pyshortver = '.'.join(get_software_version('Python').split('.')[:2])
-            regex_subs = [(r'#include "cuda/include/', r'#include "')]
-            base_path = os.path.join(self.installdir, 'lib', 'python%s' % pyshortver, 'site-packages', 'tensorflow',
-                                     'include', 'tensorflow')
-            for header in glob.glob(os.path.join(base_path, 'stream_executor', 'cuda', 'cuda*.h')) + glob.glob(
-                    os.path.join(base_path, 'core', 'util', 'cuda*.h')):
-                apply_regex_substitutions(header, regex_subs)
-
-    def sanity_check_step(self):
-        """Custom sanity check for TensorFlow."""
-        custom_paths = {
-            'files': ['bin/tensorboard'],
-            'dirs': [self.pylibdir],
-        }
-
-        custom_commands = [
-            "%s -c 'import tensorflow'" % self.python_cmd,
-            # tf_should_use imports weakref.finalize, which requires backports.weakref for Python < 3.4
-            "%s -c 'from tensorflow.python.util import tf_should_use'" % self.python_cmd,
-        ]
-        res = super(EB_TensorFlow, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
-
-        # test installation using MNIST tutorial examples
-        if self.cfg['runtest']:
-            pythonpath = os.getenv('PYTHONPATH', '')
-            env.setvar('PYTHONPATH', os.pathsep.join([os.path.join(self.installdir, self.pylibdir), pythonpath]))
-
-            mnist_pys = []
-
-            if LooseVersion(self.version) < LooseVersion('2.0'):
-                mnist_pys.append('mnist_with_summaries.py')
-
-            if LooseVersion(self.version) < LooseVersion('1.13'):
-                # mnist_softmax.py was removed in TensorFlow 1.13.x
-                mnist_pys.append('mnist_softmax.py')
-
-            for mnist_py in mnist_pys:
-                datadir = tempfile.mkdtemp(suffix='-tf-%s-data' % os.path.splitext(mnist_py)[0])
-                logdir = tempfile.mkdtemp(suffix='-tf-%s-logs' % os.path.splitext(mnist_py)[0])
-                mnist_py = os.path.join(self.start_dir, 'tensorflow', 'examples', 'tutorials', 'mnist', mnist_py)
-                cmd = "%s %s --data_dir %s --log_dir %s" % (self.python_cmd, mnist_py, datadir, logdir)
-                run_cmd(cmd, log_all=True, simple=True, log_ok=True)
-
-        # run test script (if any)
-        if self.test_script:
-            # copy test script to build dir before running it, to avoid that a file named 'tensorflow.py'
-            # (a customized TensorFlow easyblock for example) breaks 'import tensorflow'
-            test_script = os.path.join(self.builddir, os.path.basename(self.test_script))
-            copy_file(self.test_script, test_script)
-
-            run_cmd("python %s" % test_script, log_all=True, simple=True, log_ok=True)
-
-        return res
diff --git a/Custom_EasyBlocks/vmd.py b/Custom_EasyBlocks/vmd.py
deleted file mode 100644
index 6f8a38e280cc45ea093c9592c62b7ae8e0d5cb14..0000000000000000000000000000000000000000
--- a/Custom_EasyBlocks/vmd.py
+++ /dev/null
@@ -1,232 +0,0 @@
-##
-# Copyright 2009-2020 Ghent University
-# Copyright 2015-2020 Stanford University
-#
-# This file is part of 
EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# the Hercules foundation (http://www.herculesstichting.be/in_English) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for VMD, implemented as an easyblock - -@author: Stephane Thiell (Stanford University) -@author: Kenneth Hoste (HPC-UGent) -""" -import os - -from distutils.version import LooseVersion -from easybuild.easyblocks.generic.configuremake import ConfigureMake -from easybuild.easyblocks.generic.pythonpackage import det_pylibdir -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import change_dir, copy_file, extract_file -from easybuild.tools.run import run_cmd -from easybuild.tools.modules import get_software_root, get_software_version -import easybuild.tools.environment as env -import easybuild.tools.toolchain as toolchain - - -class EB_VMD(ConfigureMake): - """Easyblock for building and installing VMD""" - - def __init__(self, *args, **kwargs): - """Initialize VMD-specific variables.""" - super(EB_VMD, self).__init__(*args, **kwargs) - # source tarballs contains a 'plugins' and 'vmd-<version>' directory - self.vmddir = os.path.join(self.builddir, '%s-%s' % (self.name.lower(), self.version)) - self.surf_dir = os.path.join(self.vmddir, 'lib', 'surf') - self.stride_dir = os.path.join(self.vmddir, 'lib', 'stride') - - def extract_step(self): - """Custom extract step for VMD.""" - super(EB_VMD, self).extract_step() - - if LooseVersion(self.version) >= LooseVersion("1.9.3"): - change_dir(self.surf_dir) - srcdir = extract_file('surf.tar.Z', os.getcwd(), change_into_dir=False) - change_dir(srcdir) - - def configure_step(self): - """ - Configure VMD for building. 
- """ - # make sure required dependencies are available - deps = {} - for dep in ['FLTK', 'OpenGL', 'netCDF', 'Python', 'Tcl', 'Tk']: - deps[dep] = get_software_root(dep) - if deps[dep] is None: - raise EasyBuildError("Required dependency %s is missing", dep) - - # optional dependencies - for dep in ['ACTC', 'CUDA', 'OptiX']: - deps[dep] = get_software_root(dep) - - # specify Tcl/Tk locations & libraries - tclinc = os.path.join(deps['Tcl'], 'include') - tcllib = os.path.join(deps['Tcl'], 'lib') - env.setvar('TCL_INCLUDE_DIR', tclinc) - env.setvar('TCL_LIBRARY_DIR', tcllib) - - env.setvar('TK_INCLUDE_DIR', os.path.join(deps['Tk'], 'include')) - env.setvar('TK_LIBRARY_DIR', os.path.join(deps['Tk'], 'lib')) - - tclshortver = '.'.join(get_software_version('Tcl').split('.')[:2]) - self.cfg.update('buildopts', 'TCLLDFLAGS="-ltcl%s"' % tclshortver) - - # Netcdf locations - netcdfinc = os.path.join(deps['netCDF'], 'include') - netcdflib = os.path.join(deps['netCDF'], 'lib') - - # Python locations - pymajver = get_software_version('Python').split('.')[0] - out, ec = run_cmd("python -c 'import sysconfig; print(sysconfig.get_path(\"include\"))'", simple=False) - if ec: - raise EasyBuildError("Failed to determine Python include path: %s", out) - else: - env.setvar('PYTHON_INCLUDE_DIR', out.strip()) - pylibdir = det_pylibdir() - python_libdir = os.path.join(deps['Python'], os.path.dirname(pylibdir)) - env.setvar('PYTHON_LIBRARY_DIR', python_libdir) - out, ec = run_cmd("python%s-config --libs" % pymajver, simple=False) - if ec: - raise EasyBuildError("Failed to determine Python library name: %s", out) - else: - env.setvar('PYTHON_LIBRARIES', out.strip()) - - # numpy include location, easiest way to determine it is via numpy.get_include() - out, ec = run_cmd("python -c 'import numpy; print(numpy.get_include())'", simple=False) - if ec: - raise EasyBuildError("Failed to determine numpy include directory: %s", out) - else: - env.setvar('NUMPY_INCLUDE_DIR', out.strip()) - - # compiler commands - self.cfg.update('buildopts', 'CC="%s"' % os.getenv('CC')) - self.cfg.update('buildopts', 'CCPP="%s"' % os.getenv('CXX')) - - # plugins need to be built first (see http://www.ks.uiuc.edu/Research/vmd/doxygen/compiling.html) - change_dir(os.path.join(self.builddir, 'plugins')) - cmd = ' '.join([ - 'make', - 'LINUXAMD64', - "TCLINC='-I%s'" % tclinc, - "TCLLIB='-L%s'" % tcllib, - "TCLLDFLAGS='-ltcl%s'" % tclshortver, - "NETCDFINC='-I%s'" % netcdfinc, - "NETCDFLIB='-L%s'" % netcdflib, - self.cfg['buildopts'], - ]) - run_cmd(cmd, log_all=True, simple=False) - - # create plugins distribution - plugindir = os.path.join(self.vmddir, 'plugins') - env.setvar('PLUGINDIR', plugindir) - self.log.info("Generating VMD plugins in %s", plugindir) - run_cmd("make distrib %s" % self.cfg['buildopts'], log_all=True, simple=False) - - # explicitely mention whether or not we're building with CUDA/OptiX support - if deps['CUDA']: - self.log.info("Building with CUDA %s support", get_software_version('CUDA')) - if deps['OptiX']: - self.log.info("Building with Nvidia OptiX %s support", get_software_version('OptiX')) - else: - self.log.warn("Not building with Nvidia OptiX support!") - else: - self.log.warn("Not building with CUDA nor OptiX support!") - - # see http://www.ks.uiuc.edu/Research/vmd/doxygen/configure.html - # LINUXAMD64: Linux 64-bit - # LP64: build VMD as 64-bit binary - # IMD: enable support for Interactive Molecular Dynamics (e.g. 
to connect to NAMD for remote simulations) - # PTHREADS: enable support for POSIX threads - # COLVARS: enable support for collective variables (related to NAMD/LAMMPS) - # NOSILENT: verbose build command - # FLTK: enable the standard FLTK GUI - # TK: enable TK to support extension GUI elements - # OPENGL: enable OpenGL - self.cfg.update( - 'configopts', "LINUXAMD64 LP64 IMD PTHREADS COLVARS NOSILENT FLTK TK OPENGL", allow_duplicate=False) - - # add additional configopts based on available dependencies - for key in deps: - if deps[key]: - if key == 'Mesa' or key == 'OpenGL': - self.cfg.update('configopts', "OPENGL MESA", allow_duplicate=False) - elif key == 'OptiX': - self.cfg.update('configopts', "LIBOPTIX", allow_duplicate=False) - elif key == 'Python': - self.cfg.update('configopts', "PYTHON NUMPY", allow_duplicate=False) - else: - self.cfg.update('configopts', key.upper(), allow_duplicate=False) - - # configure for building with Intel compilers specifically - if self.toolchain.comp_family() == toolchain.INTELCOMP: - self.cfg.update('configopts', 'ICC', allow_duplicate=False) - - # specify install location using environment variables - env.setvar('VMDINSTALLBINDIR', os.path.join(self.installdir, 'bin')) - env.setvar('VMDINSTALLLIBRARYDIR', os.path.join(self.installdir, 'lib')) - - # configure in vmd-<version> directory - change_dir(self.vmddir) - run_cmd("%s ./configure %s" % (self.cfg['preconfigopts'], self.cfg['configopts'])) - - # change to 'src' subdirectory, ready for building - change_dir(os.path.join(self.vmddir, 'src')) - - def build_step(self): - """Custom build step for VMD.""" - super(EB_VMD, self).build_step() - - self.have_stride = False - # Build Surf, which is part of VMD as of VMD version 1.9.3 - if LooseVersion(self.version) >= LooseVersion("1.9.3"): - change_dir(self.surf_dir) - surf_build_cmd = 'make CC="%s" OPT="%s"' % (os.environ['CC'], os.environ['CFLAGS']) - run_cmd(surf_build_cmd) - # Build Stride if it was downloaded - if os.path.exists(os.path.join(self.stride_dir, 'Makefile')): - change_dir(self.stride_dir) - self.have_stride = True - stride_build_cmd = 'make CC="%s" CFLAGS="%s"' % (os.environ['CC'], os.environ['CFLAGS']) - run_cmd(stride_build_cmd) - else: - self.log.info("Stride has not been downloaded and/or unpacked.") - - def install_step(self): - """Custom build step for VMD.""" - - # Install must also be done in 'src' subdir - change_dir(os.path.join(self.vmddir, 'src')) - super(EB_VMD, self).install_step() - - if LooseVersion(self.version) >= LooseVersion("1.9.3"): - surf_bin = os.path.join(self.surf_dir, 'surf') - copy_file(surf_bin, os.path.join(self.installdir, 'lib', 'surf_LINUXAMD64')) - if self.have_stride: - stride_bin = os.path.join(self.stride_dir, 'stride') - copy_file(stride_bin, os.path.join(self.installdir, 'lib', 'stride_LINUXAMD64')) - - def sanity_check_step(self): - """Custom sanity check for VMD.""" - custom_paths = { - 'files': ['bin/vmd'], - 'dirs': ['lib'], - } - super(EB_VMD, self).sanity_check_step(custom_paths=custom_paths) diff --git a/Custom_Toolchains/compiler/nvhpc.py b/Custom_Toolchains/compiler/nvhpc.py deleted file mode 100644 index 8aef532d07d921f290d73b170b5fc43faccaf4fa..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/compiler/nvhpc.py +++ /dev/null @@ -1,111 +0,0 @@ -## -# Copyright 2015 Bart Oldeman -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. 
-# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# the Hercules foundation (http://www.herculesstichting.be/in_English) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -Support for NVIDIA HPC SDK ('NVHPC') compilers (nvc, nvc++, nvfortran) as toolchain compilers. NVHPC is the successor of the PGI compilers, on which this file is based upon. - -:author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada) -:author: Damian Alvarez (Forschungszentrum Juelich GmbH) -:author: Andreas Herten (Forschungszentrum Juelich GmbH) -""" - -from distutils.version import LooseVersion - -import easybuild.tools.systemtools as systemtools -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.toolchain.compiler import Compiler - - -TC_CONSTANT_NVHPC = "NVHPC" - - -class Nvhpc(Compiler): - """NVHPC compiler class - """ - - COMPILER_MODULE_NAME = ['NVHPC'] - - COMPILER_FAMILY = TC_CONSTANT_NVHPC - - # References: - # https://docs.nvidia.com/hpc-sdk/compilers/hpc-compilers-user-guide/index.html - # nvc --help - # And previously, for PGI: - # http://www.pgroup.com/doc/pgiref.pdf - # http://www.pgroup.com/products/freepgi/freepgi_ref/ch02.html#Mflushz - # http://www.pgroup.com/products/freepgi/freepgi_ref/ch02.html#Mfprelaxed - # http://www.pgroup.com/products/freepgi/freepgi_ref/ch02.html#Mfpapprox - COMPILER_UNIQUE_OPTION_MAP = { - 'i8': 'i8', - 'r8': 'r8', - 'optarch': '', # PGI by default generates code for the arch it is running on! 
- 'openmp': 'mp', - 'ieee': 'Kieee', - 'strict': ['Mnoflushz','Kieee'], - 'precise': ['Mnoflushz'], - 'defaultprec': ['Mflushz'], - 'loose': ['Mfprelaxed'], - 'veryloose': ['Mfprelaxed=div,order,intrinsic,recip,sqrt,rsqrt', 'Mfpapprox'], - 'vectorize': {False: 'Mnovect', True: 'Mvect'}, - } - - # used when 'optarch' toolchain option is enabled (and --optarch is not specified) - COMPILER_OPTIMAL_ARCHITECTURE_OPTION = { - (systemtools.X86_64, systemtools.AMD): 'tp=host', - (systemtools.X86_64, systemtools.INTEL): 'tp=host', - } - # used with --optarch=GENERIC - COMPILER_GENERIC_OPTION = { - (systemtools.X86_64, systemtools.AMD): 'tp=px', - (systemtools.X86_64, systemtools.INTEL): 'tp=px', - } - - COMPILER_CC = 'nvc' - # C++ compiler command is version-dependent, see below - COMPILER_CXX = 'nvc++' - - COMPILER_F77 = 'nvfortran' - COMPILER_F90 = 'nvfortran' - COMPILER_FC = 'nvfortran' - - LINKER_TOGGLE_STATIC_DYNAMIC = { - 'static': '-Bstatic', - 'dynamic':'-Bdynamic', - } - - def _set_compiler_flags(self): - """Set -tp=x64 if optarch is set to False.""" - if not self.options.get('optarch', False): - self.variables.nextend('OPTFLAGS', ['tp=x64']) - super(Nvhpc, self)._set_compiler_flags() - - def _set_compiler_vars(self): - """Set the compiler variables""" - nvhpc_version = self.get_software_version(self.COMPILER_MODULE_NAME)[0] - - super(Nvhpc, self)._set_compiler_vars() - diff --git a/Custom_Toolchains/compiler/pgi.py b/Custom_Toolchains/compiler/pgi.py deleted file mode 100644 index 38b51f274cbc797041c65b8f21597e23febc90d2..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/compiler/pgi.py +++ /dev/null @@ -1,117 +0,0 @@ -## -# Copyright 2015 Bart Oldeman -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# the Hercules foundation (http://www.herculesstichting.be/in_English) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -Support for PGI compilers (pgcc, pgc++, pgf90/pgfortran) as toolchain compilers. 
- -:author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada) -:author: Damian Alvarez (Forschungszentrum Juelich GmbH) -""" - -from distutils.version import LooseVersion - -import easybuild.tools.systemtools as systemtools -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.toolchain.compiler import Compiler - - -TC_CONSTANT_PGI = "PGI" - - -class Pgi(Compiler): - """PGI compiler class - """ - - COMPILER_MODULE_NAME = ['PGI'] - - COMPILER_FAMILY = TC_CONSTANT_PGI - - # References: - # http://www.pgroup.com/doc/pgiref.pdf - # http://www.pgroup.com/products/freepgi/freepgi_ref/ch02.html#Mflushz - # http://www.pgroup.com/products/freepgi/freepgi_ref/ch02.html#Mfprelaxed - # http://www.pgroup.com/products/freepgi/freepgi_ref/ch02.html#Mfpapprox - COMPILER_UNIQUE_OPTION_MAP = { - 'i8': 'i8', - 'r8': 'r8', - 'optarch': '', # PGI by default generates code for the arch it is running on! - 'openmp': 'mp', - 'ieee': 'Kieee', - 'strict': ['Mnoflushz','Kieee'], - 'precise': ['Mnoflushz'], - 'defaultprec': ['Mflushz'], - 'loose': ['Mfprelaxed'], - 'veryloose': ['Mfprelaxed=div,order,intrinsic,recip,sqrt,rsqrt', 'Mfpapprox'], - 'vectorize': {False: 'Mnovect', True: 'Mvect'}, - } - - # used when 'optarch' toolchain option is enabled (and --optarch is not specified) - COMPILER_OPTIMAL_ARCHITECTURE_OPTION = { - (systemtools.X86_64, systemtools.AMD): '', - (systemtools.X86_64, systemtools.INTEL): '', - } - # used with --optarch=GENERIC - COMPILER_GENERIC_OPTION = { - (systemtools.X86_64, systemtools.AMD): 'tp=x64', - (systemtools.X86_64, systemtools.INTEL): 'tp=x64', - } - - COMPILER_CC = 'pgcc' - # C++ compiler command is version-dependent, see below - COMPILER_CXX = None - - COMPILER_F77 = 'pgf77' - COMPILER_F90 = 'pgf90' - COMPILER_FC = 'pgfortran' - - LINKER_TOGGLE_STATIC_DYNAMIC = { - 'static': '-Bstatic', - 'dynamic':'-Bdynamic', - } - - def _set_compiler_flags(self): - """Set -tp=x64 if optarch is set to False.""" - if not self.options.get('optarch', False): - self.variables.nextend('OPTFLAGS', ['tp=x64']) - super(Pgi, self)._set_compiler_flags() - - def _set_compiler_vars(self): - """Set the compiler variables""" - pgi_version = self.get_software_version(self.COMPILER_MODULE_NAME)[0] - - # based on feedback from PGI support: use pgc++ with PGI 14.10 and newer, pgCC for older versions - if LooseVersion(pgi_version) >= LooseVersion('14.10'): - self.COMPILER_CXX = 'pgc++' - else: - self.COMPILER_CXX = 'pgCC' - - if LooseVersion(pgi_version) >= LooseVersion('19.1'): - self.COMPILER_F77 = 'pgfortran' - else: - self.COMPILER_F77 = 'pgf77' - - super(Pgi, self)._set_compiler_vars() diff --git a/Custom_Toolchains/fft/intelfftw.py b/Custom_Toolchains/fft/intelfftw.py deleted file mode 100644 index 4e89a667bd2c7b5b57dac7fde9d5fc29fb2c5ce4..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/fft/intelfftw.py +++ /dev/null @@ -1,114 +0,0 @@ -## -# Copyright 2012-2018 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). 
-# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -Support for Intel FFTW as toolchain FFT library. - -:author: Stijn De Weirdt (Ghent University) -:author: Kenneth Hoste (Ghent University) -""" -import os -from distutils.version import LooseVersion - -from easybuild.tools.build_log import EasyBuildError, dry_run_warning -from easybuild.tools.config import build_option -from easybuild.toolchains.fft.fftw import Fftw -from easybuild.tools.modules import get_software_root, get_software_version - - -class IntelFFTW(Fftw): - """FFTW wrapper functionality of Intel MKL""" - - FFT_MODULE_NAME = ['imkl'] - - FFT_LIB_GROUP = True - FFT_LIB_STATIC = True - - def _set_fftw_variables(self): - if not hasattr(self, 'BLAS_LIB_DIR'): - raise EasyBuildError("_set_fftw_variables: IntelFFT based on IntelMKL (no BLAS_LIB_DIR found)") - - imklver = get_software_version(self.FFT_MODULE_NAME[0]) - - picsuff = '' - if self.options.get('pic', None): - picsuff = '_pic' - bitsuff = '_lp64' - if self.options.get('i8', None): - bitsuff = '_ilp64' - compsuff = '_intel' - if get_software_root('icc') is None: - if get_software_root('PGI'): - compsuff = '_pgi' - elif get_software_root('GCC') or get_software_root('GCCcore'): - compsuff = '_gnu' - else: - raise EasyBuildError("Not using Intel compilers, PGI nor GCC, don't know compiler suffix for FFTW libraries.") - - interface_lib = "fftw3xc%s%s" % (compsuff, picsuff) - fftw_libs = [interface_lib] - cluster_interface_lib = None - if self.options.get('usempi', False): - # add cluster interface for recent imkl versions - if LooseVersion(imklver) >= LooseVersion('10.3'): - suff = picsuff - if LooseVersion(imklver) >= LooseVersion('11.0.2'): - suff = bitsuff + suff - cluster_interface_lib = 'fftw3x_cdft%s' % suff - fftw_libs.append(cluster_interface_lib) - fftw_libs.append("mkl_cdft_core") # add cluster dft - fftw_libs.extend(self.variables['LIBBLACS'].flatten()) # add BLACS; use flatten because ListOfList - - self.log.debug('fftw_libs %s' % fftw_libs.__repr__()) - fftw_libs.extend(self.variables['LIBBLAS'].flatten()) # add BLAS libs (contains dft) - self.log.debug('fftw_libs %s' % fftw_libs.__repr__()) - - self.FFT_LIB_DIR = self.BLAS_LIB_DIR - self.FFT_INCLUDE_DIR = [os.path.join(d, 'fftw') for d in self.BLAS_INCLUDE_DIR] - - # building the FFTW interfaces is optional, - # so make sure libraries are there before FFT_LIB is set - imklroot = get_software_root(self.FFT_MODULE_NAME[0]) - fft_lib_dirs = [os.path.join(imklroot, d) for d in self.FFT_LIB_DIR] - fftw_lib_exists = lambda x: any([os.path.exists(os.path.join(d, "lib%s.a" % x)) for d in fft_lib_dirs]) - if not fftw_lib_exists(interface_lib) and LooseVersion(imklver) >= LooseVersion("10.2"): - # interface libs can be optional: - # MKL >= 10.2 include fftw3xc and fftw3xf interfaces in LIBBLAS=libmkl_gf/libmkl_intel - # See https://software.intel.com/en-us/articles/intel-mkl-main-libraries-contain-fftw3-interfaces - # The cluster interface libs (libfftw3x_cdft*) 
can be omitted if the toolchain does not provide MPI-FFTW - # interfaces. - fftw_libs = [l for l in fftw_libs if l not in [interface_lib, cluster_interface_lib]] - - # filter out libraries from list of FFTW libraries to check for if they are not provided by Intel MKL - check_fftw_libs = [lib for lib in fftw_libs if lib not in ['dl', 'gfortran']] - - if all([fftw_lib_exists(lib) for lib in check_fftw_libs]): - self.FFT_LIB = fftw_libs - else: - msg = "Not all FFTW interface libraries %s are found in %s" % (check_fftw_libs, fft_lib_dirs) - msg += ", can't set $FFT_LIB." - if self.dry_run: - dry_run_warning(msg, silent=build_option('silent')) - else: - raise EasyBuildError(msg) diff --git a/Custom_Toolchains/gmvapich2c.py b/Custom_Toolchains/gmvapich2c.py deleted file mode 100644 index 918f86f5ebacc274f87352e4f3770e9450cab1da..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/gmvapich2c.py +++ /dev/null @@ -1,42 +0,0 @@ -## -# Copyright 2012-2016 Ghent University -# Copyright 2016-2016 Forschungszentrum Juelich -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for gmvapich2 compiler toolchain (includes GCC and MVAPICH2, and CUDA as a dependency). - -@author: Kenneth Hoste (Ghent University) -@author: Damian Alvarez (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.gcc import GccToolchain -# We pull in MPI and CUDA at once so this maps nicely to HMNS -from easybuild.toolchains.mpi.mvapich2 import Mvapich2 -from easybuild.toolchains.compiler.cuda import Cuda - -# Order matters here! -class Gmvapich2c(GccToolchain, Cuda, Mvapich2): - """Compiler toolchain with GCC and MVAPICH2, and CUDA as a dependency.""" - NAME = 'gmvapich2c' - SUBTOOLCHAIN = GccToolchain.NAME diff --git a/Custom_Toolchains/gmvmklc.py b/Custom_Toolchains/gmvmklc.py deleted file mode 100644 index 6394f7e3b226ef6843ab18f30b069d099e3bcd1a..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/gmvmklc.py +++ /dev/null @@ -1,43 +0,0 @@ -## -# Copyright 2016-2016 Ghent University -# Copyright 2016-2016 Forschungszentrum Juelich -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. 
-# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for gmvmklc compiler toolchain (includes GCC, MVAPICH2 and MKL, and CUDA as dependency). - -@author: Damian Alvarez (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.gmvapich2c import Gmvapich2c -from easybuild.toolchains.fft.intelfftw import IntelFFTW -from easybuild.toolchains.linalg.intelmkl import IntelMKL - - -class Gmvmklc(Gmvapich2c, IntelMKL, IntelFFTW): - """Compiler toolchain with GCC, MVAPICH2 and MKL, and CUDA as dependency.""" - NAME = 'gmvmklc' - SUBTOOLCHAIN = Gmvapich2c.NAME diff --git a/Custom_Toolchains/gmvolfc.py b/Custom_Toolchains/gmvolfc.py deleted file mode 100644 index c5ce82ba892144082109cfe7850233c40dd582f4..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/gmvolfc.py +++ /dev/null @@ -1,45 +0,0 @@ -## -# Copyright 2016-2016 Ghent University -# Copyright 2016-2016 Forschungszentrum Juelich -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for gmvolfc compiler toolchain (includes GCC, MVAPICH2, OpenBLAS, LAPACK, ScaLAPACK -and FFTW, and CUDA as dependency). 
- -@author: Damian Alvarez (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.gmvapich2c import Gmvapich2c -from easybuild.toolchains.fft.fftw import Fftw -from easybuild.toolchains.linalg.openblas import OpenBLAS -from easybuild.toolchains.linalg.scalapack import ScaLAPACK - - -class Gmvolfc(Gmvapich2c, OpenBLAS, ScaLAPACK, Fftw): - """Compiler toolchain with GCC, MVAPICH2, OpenBLAS, ScaLAPACK and FFTW, and CUDA as dependency.""" - NAME = 'gmvolfc' - SUBTOOLCHAIN = Gmvapich2c.NAME diff --git a/Custom_Toolchains/iimpi.py b/Custom_Toolchains/iimpi.py deleted file mode 100644 index 0a2104872b2fcccb93384a413890fe8a2f032abd..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/iimpi.py +++ /dev/null @@ -1,111 +0,0 @@ -## -# Copyright 2012-2021 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for intel compiler toolchain (includes Intel compilers (icc, ifort), Intel MPI). - -:author: Stijn De Weirdt (Ghent University) -:author: Kenneth Hoste (Ghent University) -""" -from distutils.version import LooseVersion -import re - -from easybuild.toolchains.iccifort import IccIfort -from easybuild.toolchains.intel_compilers import IntelCompilersToolchain -from easybuild.toolchains.mpi.intelmpi import IntelMPI - - -class Iimpi(IccIfort, IntelCompilersToolchain, IntelMPI): - """ - Compiler toolchain with Intel compilers (icc/ifort), Intel MPI. 
- """ - NAME = 'iimpi' - # compiler-only subtoolchain can't be determine statically - # since depends on toolchain version (see below), - # so register both here as possible alternatives (which is taken into account elsewhere) - SUBTOOLCHAIN = [(IntelCompilersToolchain.NAME, IccIfort.NAME)] - - def __init__(self, *args, **kwargs): - """Constructor for Iimpi toolchain class.""" - - super(Iimpi, self).__init__(*args, **kwargs) - - # make sure a non-symbolic version (e.g., 'system') is used before making comparisons using LooseVersion - if re.match('^[0-9]', self.version): - # need to transform a version like '2016a' with something that is safe to compare with '8.0', '2016.01' - # comparing subversions that include letters causes TypeErrors in Python 3 - # 'a' is assumed to be equivalent with '.01' (January), and 'b' with '.07' (June) - # (good enough for this purpose) - self.iimpi_ver = self.version.replace('a', '.01').replace('b', '.07') - if LooseVersion(self.iimpi_ver) >= LooseVersion('2020.12'): - self.oneapi_gen = True - self.SUBTOOLCHAIN = IntelCompilersToolchain.NAME - self.COMPILER_MODULE_NAME = IntelCompilersToolchain.COMPILER_MODULE_NAME - else: - self.oneapi_gen = False - self.SUBTOOLCHAIN = IccIfort.NAME - self.COMPILER_MODULE_NAME = IccIfort.COMPILER_MODULE_NAME - else: - self.iimpi_ver = self.version - self.oneapi_gen = False - - def is_deprecated(self): - """Return whether or not this toolchain is deprecated.""" - - deprecated = False - - # make sure a non-symbolic version (e.g., 'system') is used before making comparisons using LooseVersion - if re.match('^[0-9]', str(self.iimpi_ver)): - loosever = LooseVersion(self.iimpi_ver) - # iimpi toolchains older than iimpi/2016.01 are deprecated - # iimpi 8.1.5 is an exception, since it used in intel/2016a (which is not deprecated yet) - if loosever < LooseVersion('8.0'): - deprecated = True - elif loosever > LooseVersion('2000') and loosever < LooseVersion('2016.01'): - deprecated = True - - return deprecated - - def is_dep_in_toolchain_module(self, *args, **kwargs): - """Check whether a specific software name is listed as a dependency in the module for this toolchain.""" - if self.oneapi_gen: - res = IntelCompilersToolchain.is_dep_in_toolchain_module(self, *args, **kwargs) - else: - res = IccIfort.is_dep_in_toolchain_module(self, *args, **kwargs) - - return res - - def _set_compiler_vars(self): - """Intel compilers-specific adjustments after setting compiler variables.""" - if self.oneapi_gen: - IntelCompilersToolchain._set_compiler_vars(self) - else: - IccIfort._set_compiler_vars(self) - - def set_variables(self): - """Intel compilers-specific adjustments after setting compiler variables.""" - if self.oneapi_gen: - IntelCompilersToolchain.set_variables(self) - else: - IccIfort.set_variables(self) diff --git a/Custom_Toolchains/imvapich2c.py b/Custom_Toolchains/imvapich2c.py deleted file mode 100644 index b5656074ff71563661a8e8028f2a123f95691efa..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/imvapich2c.py +++ /dev/null @@ -1,41 +0,0 @@ -## -# Copyright 2016-2016 Ghent University -# Copyright 2016-2016 Forschungszentrum Juelich -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) 
(http://www.ewi-vlaanderen.be/en). -# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for imvapich2c compiler toolchain (includes Intel and MVAPICH2, and CUDA as dependency). - -@author: Damian Alvarez (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.iccifort import IccIfort -# We pull in MPI and CUDA at once so this maps nicely to HMNS -from easybuild.toolchains.mpi.mvapich2 import Mvapich2 -from easybuild.toolchains.compiler.cuda import Cuda - -# Order matters here! -class Imvapich2c(IccIfort, Cuda, Mvapich2): - """Compiler toolchain with Intel and MVAPICH2, with CUDA as dependency.""" - NAME = 'imvapich2c' - SUBTOOLCHAIN = IccIfort.NAME diff --git a/Custom_Toolchains/imvmklc.py b/Custom_Toolchains/imvmklc.py deleted file mode 100644 index 92f88582f41fda21946afdcce19aa6c2755673be..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/imvmklc.py +++ /dev/null @@ -1,43 +0,0 @@ -## -# Copyright 2016-2016 Ghent University -# Copyright 2016-2016 Forschungszentrum Juelich -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for imvmklc compiler toolchain (includes Intel, MVAPICH2 and MKL, and CUDA as dependency). 
- -@author: Damian Alvarez (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.imvapich2c import Imvapich2c -from easybuild.toolchains.fft.intelfftw import IntelFFTW -from easybuild.toolchains.linalg.intelmkl import IntelMKL - - -class Imvmklc(Imvapich2c, IntelMKL, IntelFFTW): - """Compiler toolchain with Intel, MVAPICH2 and MKL, and CUDA as dependency.""" - NAME = 'imvmklc' - SUBTOOLCHAIN = Imvapich2c.NAME diff --git a/Custom_Toolchains/linalg/intelmkl.py b/Custom_Toolchains/linalg/intelmkl.py deleted file mode 100644 index 8f46f7caf3805b867c25927f44bd2846c4e85391..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/linalg/intelmkl.py +++ /dev/null @@ -1,214 +0,0 @@ -## -# Copyright 2012-2021 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -Support for Intel MKL as toolchain linear algebra library. 
-:author: Stijn De Weirdt (Ghent University) -:author: Kenneth Hoste (Ghent University) -""" -import os -from distutils.version import LooseVersion - -from easybuild.toolchains.compiler.gcc import TC_CONSTANT_GCC -from easybuild.toolchains.compiler.inteliccifort import TC_CONSTANT_INTELCOMP -from easybuild.toolchains.compiler.pgi import TC_CONSTANT_PGI -from easybuild.toolchains.mpi.intelmpi import TC_CONSTANT_INTELMPI -from easybuild.toolchains.mpi.mpich import TC_CONSTANT_MPICH -from easybuild.toolchains.mpi.mpich2 import TC_CONSTANT_MPICH2 -from easybuild.toolchains.mpi.mvapich2 import TC_CONSTANT_MVAPICH2 -from easybuild.toolchains.mpi.openmpi import TC_CONSTANT_OPENMPI -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.toolchain.linalg import LinAlg - - -TC_CONSTANT_INTELMKL = 'IntelMKL' - - -class IntelMKL(LinAlg): - """Support for Intel MKL.""" - - # library settings are inspired by http://software.intel.com/en-us/articles/intel-mkl-link-line-advisor - BLAS_MODULE_NAME = ['imkl'] - BLAS_LIB_MAP = { - "lp64": '_lp64', - "interface": None, - "interface_mt": None, - } - BLAS_LIB = ["mkl_%(interface)s%(lp64)s", "mkl_sequential", "mkl_core"] - BLAS_LIB_MT = ["mkl_%(interface)s%(lp64)s", - "mkl_%(interface_mt)s_thread", "mkl_core"] - BLAS_LIB_GROUP = True - BLAS_LIB_STATIC = True - BLAS_FAMILY = TC_CONSTANT_INTELMKL - - LAPACK_MODULE_NAME = ['imkl'] - LAPACK_IS_BLAS = True - LAPACK_FAMILY = TC_CONSTANT_INTELMKL - - BLACS_MODULE_NAME = ['imkl'] - BLACS_LIB = ["mkl_blacs%(mpi)s%(lp64)s"] - BLACS_LIB_MAP = {'mpi': None} - BLACS_LIB_GROUP = True - BLACS_LIB_STATIC = True - - SCALAPACK_MODULE_NAME = ['imkl'] - SCALAPACK_LIB = ["mkl_scalapack%(lp64_sc)s"] - SCALAPACK_LIB_MT = ["mkl_scalapack%(lp64_sc)s"] - SCALAPACK_LIB_MAP = {'lp64_sc': '_lp64'} - SCALAPACK_REQUIRES = ['LIBBLACS', 'LIBBLAS'] - SCALAPACK_LIB_GROUP = True - SCALAPACK_LIB_STATIC = True - - def __init__(self, *args, **kwargs): - """Toolchain constructor.""" - class_constants = kwargs.setdefault('class_constants', []) - class_constants.extend( - ['BLAS_LIB_MAP', 'SCALAPACK_LIB', 'SCALAPACK_LIB_MT', 'SCALAPACK_LIB_MAP']) - super(IntelMKL, self).__init__(*args, **kwargs) - - def set_variables(self): - """Set the variables""" - - # for recent versions of Intel MKL, -ldl should be used for linking; - # the Intel MKL Link Advisor specifies to always do this, - # but it is only needed when statically linked with Intel MKL, - # and only strictly needed for some compilers (e.g. 
PGI) - mkl_version = self.get_software_version(self.BLAS_MODULE_NAME)[0] - if LooseVersion(mkl_version) >= LooseVersion('11') and self.COMPILER_FAMILY in [TC_CONSTANT_PGI]: - self.log.info( - "Adding -ldl as extra library when linking with Intel MKL libraries (for v11.x and newer)") - if self.LIB_EXTRA is None: - self.LIB_EXTRA = ['dl'] - elif 'dl' not in self.LIB_EXTRA: - self.LIB_EXTRA.append('dl') - - super(IntelMKL, self).set_variables() - - def _set_blas_variables(self): - """Fix the map a bit""" - interfacemap = { - TC_CONSTANT_INTELCOMP: 'intel', - TC_CONSTANT_GCC: 'gf', - # Taken from https://www.pgroup.com/support/link.htm#mkl - TC_CONSTANT_PGI: 'intel', - } - try: - self.BLAS_LIB_MAP.update({ - "interface": interfacemap[self.COMPILER_FAMILY], - }) - except Exception: - raise EasyBuildError("_set_blas_variables: interface unsupported combination with MPI family %s", - self.COMPILER_FAMILY) - - interfacemap_mt = { - TC_CONSTANT_INTELCOMP: 'intel', - TC_CONSTANT_GCC: 'gnu', - TC_CONSTANT_PGI: 'pgi', - } - try: - self.BLAS_LIB_MAP.update( - {"interface_mt": interfacemap_mt[self.COMPILER_FAMILY]}) - except Exception: - raise EasyBuildError("_set_blas_variables: interface_mt unsupported combination with compiler family %s", - self.COMPILER_FAMILY) - - if self.options.get('32bit', None): - # 32bit - self.BLAS_LIB_MAP.update({"lp64": ''}) - if self.options.get('i8', None): - # ilp64/i8 - self.BLAS_LIB_MAP.update({"lp64": '_ilp64'}) - # CPP / CFLAGS - self.variables.nappend_el('CFLAGS', 'DMKL_ILP64') - - # exact paths/linking statements depend on imkl version - found_version = self.get_software_version(self.BLAS_MODULE_NAME)[0] - ver = LooseVersion(found_version) - if ver < LooseVersion('10.3'): - if self.options.get('32bit', None): - self.BLAS_LIB_DIR = ['lib/32'] - else: - self.BLAS_LIB_DIR = ['lib/em64t'] - self.BLAS_INCLUDE_DIR = ['include'] - else: - if self.options.get('32bit', None): - raise EasyBuildError("_set_blas_variables: 32-bit libraries not supported yet for IMKL v%s (> v10.3)", - found_version) - else: - if ver >= LooseVersion('2021'): - basedir = os.path.join('mkl', found_version) - else: - basedir = 'mkl' - - self.BLAS_LIB_DIR = [os.path.join(basedir, 'lib', 'intel64')] - if ver >= LooseVersion('10.3.4') and ver < LooseVersion('11.1'): - self.BLAS_LIB_DIR.append( - os.path.join('compiler', 'lib', 'intel64')) - elif ver < LooseVersion('2021'): - self.BLAS_LIB_DIR.append(os.path.join('lib', 'intel64')) - - self.BLAS_INCLUDE_DIR = [os.path.join(basedir, 'include')] - - super(IntelMKL, self)._set_blas_variables() - - def _set_blacs_variables(self): - mpimap = { - TC_CONSTANT_OPENMPI: '_openmpi', - TC_CONSTANT_INTELMPI: '_intelmpi', - TC_CONSTANT_MVAPICH2: '_intelmpi', - # use intelmpi MKL blacs library for both MPICH v2 and v3 - # cfr. 
https://software.intel.com/en-us/articles/intel-mkl-link-line-advisor - # note: MKL link advisor uses 'MPICH' for MPICH v1 - TC_CONSTANT_MPICH2: '_intelmpi', - TC_CONSTANT_MPICH: '_intelmpi', - } - try: - self.BLACS_LIB_MAP.update({'mpi': mpimap[self.MPI_FAMILY]}) - except Exception: - raise EasyBuildError("_set_blacs_variables: mpi unsupported combination with MPI family %s", - self.MPI_FAMILY) - - self.BLACS_LIB_DIR = self.BLAS_LIB_DIR - self.BLACS_INCLUDE_DIR = self.BLAS_INCLUDE_DIR - - super(IntelMKL, self)._set_blacs_variables() - - def _set_scalapack_variables(self): - imkl_version = self.get_software_version(self.BLAS_MODULE_NAME)[0] - if LooseVersion(imkl_version) < LooseVersion('10.3'): - self.SCALAPACK_LIB.append("mkl_solver%(lp64)s_sequential") - self.SCALAPACK_LIB_MT.append("mkl_solver%(lp64)s") - - if self.options.get('32bit', None): - # 32 bit - self.SCALAPACK_LIB_MAP.update({"lp64_sc": '_core'}) - - elif self.options.get('i8', None): - # ilp64/i8 - self.SCALAPACK_LIB_MAP.update({"lp64_sc": '_ilp64'}) - - self.SCALAPACK_LIB_DIR = self.BLAS_LIB_DIR - self.SCALAPACK_INCLUDE_DIR = self.BLAS_INCLUDE_DIR - - super(IntelMKL, self)._set_scalapack_variables() diff --git a/Custom_Toolchains/nvhpc.py b/Custom_Toolchains/nvhpc.py deleted file mode 100644 index abff3ec914298a2a4e62b22827705cbd62a39149..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/nvhpc.py +++ /dev/null @@ -1,46 +0,0 @@ -## -# Copyright 2015 Bart Oldeman -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# the Hercules foundation (http://www.herculesstichting.be/in_English) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for NVHPC compiler toolchain. 
- -@author: Andreas Herten (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.compiler.nvhpc import Nvhpc -from easybuild.toolchains.gcccore import GCCcore -from easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME - - -class NvhpcToolchain(Nvhpc): - """Simple toolchain with just the NVIDIA HPC SDK compilers.""" - NAME = 'NVHPC' - # use GCCcore as subtoolchain rather than GCC, since two 'real' compiler-only toolchains don't mix well, - # in particular in a hierarchical module naming scheme - SUBTOOLCHAIN = [GCCcore.NAME, SYSTEM_TOOLCHAIN_NAME] - OPTIONAL = False - diff --git a/Custom_Toolchains/pmvapich2c.py b/Custom_Toolchains/pmvapich2c.py deleted file mode 100644 index 9328b2122c159fae822bc0611729eb390163a180..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/pmvapich2c.py +++ /dev/null @@ -1,41 +0,0 @@ -## -# Copyright 2016-2016 Ghent University -# Copyright 2016-2016 Forschungszentrum Juelich -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for pmvapich2c compiler toolchain (includes PGI and MVAPICH2, and CUDA as dependency). - -@author: Damian Alvarez (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.pgi import PgiToolchain -# We pull in MPI and CUDA at once so this maps nicely to HMNS -from easybuild.toolchains.mpi.mvapich2 import Mvapich2 -from easybuild.toolchains.compiler.cuda import Cuda - -# Order matters! -class Pmvapich2c(PgiToolchain, Cuda, Mvapich2): - """Compiler toolchain with PGI and MVAPICH2, with CUDA as dependency.""" - NAME = 'pmvapich2c' - SUBTOOLCHAIN = PgiToolchain.NAME diff --git a/Custom_Toolchains/pmvmklc.py b/Custom_Toolchains/pmvmklc.py deleted file mode 100644 index 08e355c6b6dad4d0b3faad8290033626ad58b2f8..0000000000000000000000000000000000000000 --- a/Custom_Toolchains/pmvmklc.py +++ /dev/null @@ -1,43 +0,0 @@ -## -# Copyright 2016-2016 Ghent University -# Copyright 2016-2016 Forschungszentrum Juelich -# -# This file is triple-licensed under GPLv2 (see below), MIT, and -# BSD three-clause licenses. -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). 
-# -# http://github.com/hpcugent/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. -## -""" -EasyBuild support for pmvmklc compiler toolchain (includes PGI, MVAPICH2 and MKL, and CUDA as dependency). - -@author: Damian Alvarez (Forschungszentrum Juelich) -""" - -from easybuild.toolchains.pmvapich2c import Pmvapich2c -from easybuild.toolchains.fft.intelfftw import IntelFFTW -from easybuild.toolchains.linalg.intelmkl import IntelMKL - - -class Pmvmklc(Pmvapich2c, IntelMKL, IntelFFTW): - """Compiler toolchain with PGI, MVAPICH2 and MKL, and CUDA as dependency.""" - NAME = 'pmvmklc' - SUBTOOLCHAIN = Pmvapich2c.NAME diff --git a/Golden_Repo/README.md b/Golden_Repo/README.md index 562613436a92416d67b53b6bd629ce217267ba91..a2b608e83cea0825b31a67a04fbd29f116856816 100644 --- a/Golden_Repo/README.md +++ b/Golden_Repo/README.md @@ -1,63 +1,42 @@ -The table below shows the details of the toolchains in the 2020 stage: +The table below shows the details of the toolchains in the 2022 stage: - Base | Toolchain name | Toolchain version | Underlying GCC | Compiler | MPI | CUDA | Math libraries | Includes software from | |-----------------|---------------------------|----------------|------------------|-------------------------|----------|----------------|---------------------------| -| GCCcore | 9.3.0 | 9.3.0 | | | | | | -| GCCcore | 10.3.0 | 10.3.0 | | | | | | +| GCCcore | TBD | TBD | | | | | | - Compilers | Toolchain name | Toolchain version | Underlying GCC | Compiler | MPI | CUDA | Math libraries | Includes software from | |-----------------|---------------------------|----------------|------------------|-------------------------|----------|----------------|---------------------------| -| GCC | 9.3.0 | 9.3.0 | GCC 9.3.0 | | | | GCCcore | -| NVHPC | 20.7-GCC-9.3.0 | 9.3.0 | NVHPC 20.7 | | 11.0.X§ | | GCCcore | -| NVHPC | 20.9-GCC-9.3.0 | 9.3.0 | NVHPC 20.9 | | 11.0.X§ | | GCCcore | -| NVHPC | 21.1-GCC-9.3.0 | 9.3.0 | NVHPC 21.1 | | 11.0.X§ | | GCCcore | -| iccifort | 2020.2.254-GCC-9.3.0 | 9.3.0 | Intel 2020.2.254 | | | | GCCcore | -| GCC | 10.3.0 | 10.3.0 | GCC 10.3.0 | | | | GCCcore | -| NVHPC | 21.5-GCC-10.3.0 | 10.3.0 | NVHPC 21.5 | | 11.3.X§ | | GCCcore | -| intel-compilers | 2021.2.0-GCC-10.3.0 | 10.3.0 | Intel 2021.2.0 | | | | GCCcore | +| GCC | TBD | TBD | GCC TBD | | | | GCCcore | +| NVHPC | TBD-GCC-TBD | TBD | NVHPC TBD | | TBD§ | | GCCcore | +| intel-compilers | TBD-GCC-TBD | TBD | Intel TBD | | | | GCCcore | - Compilers+MPI | Toolchain name | Toolchain version | Underlying GCC | Compiler | MPI | CUDA | Math libraries | Includes software from | |-----------------|---------------------------|----------------|------------------|-------------------------|----------|----------------|---------------------------| -| gpsmpi | 2020 | 9.3.0 | GCC 9.3.0 | ParaStationMPI 5.4.X | 11.0.X§ | | GCCcore, GCC | -| npsmpic | 2020 | 9.3.0 | NVHPC 20.7 | ParaStationMPI 5.4.X | 11.0.X | | GCCcore, NVHPC | -| npsmpic | 2020.1 | 9.3.0 | NVHPC 21.1 | ParaStationMPI 5.4.X | 11.0.X | | GCCcore, NVHPC | 
-| ipsmpi | 2020 | 9.3.0 | Intel 2020.2.254 | ParaStationMPI 5.4.X | 11.0.X§ | | GCCcore, iccifort | -| ipsmpi | 2020-mt | 9.3.0 | Intel 2020.2.254 | ParaStationMPI 5.4.X MT | 11.0.X§ | | GCCcore, iccifort | -| iimpi | 2020 | 9.3.0 | Intel 2020.2.254 | Intel MPI 2019.8.254 | | | GCCcore, iccifort | -| gompi | 2020 | 9.3.0 | GCC 9.3.0 | OpenMPI 4.1.0rc1 | 11.0.X§ | | GCCcore, GCC | -| iompi | 2020 | 9.3.0 | Intel 2020.2.254 | OpenMPI 4.1.0rc1 | 11.0.X§ | | GCCcore, iccifort | -| gpsmpi | 2021 | 10.3.0 | GCC 10.3.0 | ParaStationMPI 5.4.X | 11.3.X§ | | GCCcore, GCC | -| nvompic | 2021 | 10.3.0 | NVHPC 21.5 | OpenMPI 4.1.1 | 11.3.X | | GCCcore, NVHPC | -| npsmpic | 2021 | 10.3.0 | NVHPC 21.5 | ParaStationMPI 5.4.X | 11.3.X | | GCCcore, NVHPC | -| ipsmpi | 2021 | 10.3.0 | Intel 2021.2.0 | ParaStationMPI 5.4.X | 11.3.X§ | | GCCcore, intel-compilers | -| iimpi | 2021 | 10.3.0 | Intel 2021.2.0 | Intel MPI 2021.2.0 | | | GCCcore, intel-compilers | -| gompi | 2021 | 10.3.0 | GCC 10.3.0 | OpenMPI 4.1.1 | 11.3.X§ | | GCCcore, GCC | -| iompi | 2021 | 10.3.0 | Intel 2021.2.0 | OpenMPI 4.1.1 | 11.3.X§ | | GCCcore, intel-compilers | +| gpsmpi | 2022 | TBD | GCC TBD | ParaStationMPI 5.4.X | TBD§ | | GCCcore, GCC | +| nvompic | 2022 | TBD | NVHPC TBD | OpenMPI TBD | TBD | | GCCcore, NVHPC | +| npsmpic | 2022 | TBD | NVHPC TBD | ParaStationMPI 5.4.X | TBD | | GCCcore, NVHPC | +| ipsmpi | 2022 | TBD | Intel TBD | ParaStationMPI 5.4.X | TBD§ | | GCCcore, intel-compilers | +| iimpi | 2022 | TBD | Intel TBD | Intel MPI TBD | | | GCCcore, intel-compilers | +| gompi | 2022 | TBD | GCC TBD | OpenMPI TBD | TBD§ | | GCCcore, GCC | +| iompi | 2022 | TBD | Intel TBD | OpenMPI TBD | TBD§ | | GCCcore, intel-compilers | - Compilers+MPI+Math | Toolchain name | Toolchain version | Underlying GCC | Compiler | MPI | CUDA | Math libraries | Includes software from | |-----------------|---------------------------|----------------|------------------|-------------------------|----------|----------------|---------------------------| -| gpsmkl | 2020 | 9.3.0 | GCC 9.3.0 | ParaStationMPI 5.4.X | 11.0.X§ | MKL 2020.2.254 | GCCcore, GCC, gpsmpi | -| gomkl | 2020 | 9.3.0 | GCC 9.3.0 | OpenMPI 4.1.0rc1 | 11.0.X§ | MKL 2020.2.254 | GCCcore, GCC, gompi | -| intel | 2020 | 9.3.0 | Intel 2020.2.254 | Intel MPI 2019.8.254 | | MKL 2020.2.254 | GCCcore, GCC, iimpi | -| intel-para | 2020 | 9.3.0 | Intel 2020.2.254 | ParaStationMPI 5.4.X | | MKL 2020.2.254 | GCCcore, GCC, ipsmpi | -| intel-para | 2020-mt | 9.3.0 | Intel 2020.2.254 | ParaStationMPI 5.4.X MT | | MKL 2020.2.254 | GCCcore, GCC, ipsmpi | -| iomkl | 2020 | 9.3.0 | Intel 2020.2.254 | OpenMPI 4.1.0rc1 | 11.0.X§ | MKL 2020.2.254 | GCCcore, GCC, iompi | -| gpsmkl | 2021 | 10.3.0 | GCC 10.3.0 | ParaStationMPI 5.4.X | 11.3.X§ | MKL 2021.2.0 | GCCcore, GCC, gpsmpi | -| gomkl | 2021 | 10.3.0 | GCC 10.3.0 | OpenMPI 4.1.1 | 11.3.X§ | MKL 2021.2.0 | GCCcore, GCC, gompi | -| intel | 2021 | 10.3.0 | Intel 2021.2.0 | Intel MPI 2021.2.0 | | MKL 2021.2.0 | GCCcore, GCC, iimpi | -| intel-para | 2021 | 10.3.0 | Intel 2021.2.0 | ParaStationMPI 5.4.X | | MKL 2021.2.0 | GCCcore, GCC, ipsmpi | -| iomkl | 2021 | 10.3.0 | Intel 2021.2.0 | OpenMPI 4.1.1 | 11.3.X§ | MKL 2021.2.0 | GCCcore, GCC, iompi | +| gpsmkl | 2022 | TBD | GCC TBD | ParaStationMPI 5.4.X | TBD§ | MKL TBD | GCCcore, GCC, gpsmpi | +| gomkl | 2022 | TBD | GCC TBD | OpenMPI TBD | TBD§ | MKL TBD | GCCcore, GCC, gompi | +| intel | 2022 | TBD | Intel TBD | Intel MPI TBD | | MKL TBD | GCCcore, GCC, iimpi | +| intel-para | 2022 | TBD | Intel TBD | 
ParaStationMPI 5.4.X | | MKL TBD | GCCcore, GCC, ipsmpi | +| iomkl | 2022 | TBD | Intel TBD | OpenMPI TBD | TBD§ | MKL TBD | GCCcore, GCC, iompi | § Not included in the toolchain just as dependency -`intel-compilers` is the EasyBuild naming for the oneAPI compilers The `-mt` toolchain versions dissapeared in 2021. They are installed now as "side MPIs" diff --git a/Golden_Repo/a/ABINIT/ABINIT-8.10.3-gpsmkl-2020.eb b/Golden_Repo/a/ABINIT/ABINIT-8.10.3-gpsmkl-2020.eb deleted file mode 100644 index 2773fe2a45a9d6d2e799e312166d1bddab81512a..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ABINIT/ABINIT-8.10.3-gpsmkl-2020.eb +++ /dev/null @@ -1,84 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ABINIT' -version = '8.10.3' - -homepage = 'https://www.abinit.org/' -description = """ -ABINIT is a package whose main program allows one to find the total energy, charge density and electronic structure of -systems made of electrons and nuclei (molecules and periodic solids) within Density Functional Theory (DFT), using -pseudopotentials and a planewave or wavelet basis. -""" - -site_contacts = 'Sebastian Achilles (s.achilles@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': True, 'pic': True} - -source_urls = ['https://www.abinit.org/sites/default/files/packages/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['ed626424b4472b93256622fbb9c7645fa3ffb693d4b444b07d488771ea7eaa75'] - -patches = [ - 'abinit_8.10.3_m_polynomial_coeff.patch' -] - -## -# AtomPAW-4.1.0.5-*-2020.eb is lastest version to be used with ABINIT 8.10.x -## -dependencies = [ - ('libxc', '3.0.1'), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('HDF5', '1.10.6'), - ('Wannier90', '2.0.1.1', '-abinit'), - ('AtomPAW', '4.1.0.5'), -] - -preconfigopts = 'export FCFLAGS="-ffree-line-length-none $FCFLAGS" && ' - -# ensure mpi and intel toolchain -configopts = '--enable-mpi ' - -# linalg & fft -configopts += '--with-linalg-flavor=mkl ' -configopts += '--with-fft-flavor=dfti --with-fft-libs="$LIBFFT" ' - -# dft flavor -configopts += '--with-dft-flavor=libxc+wannier90+atompaw ' - -# libXC variant -configopts += '--with-libxc-incs="-I$EBROOTLIBXC/include" ' -configopts += '--with-libxc-libs="-L$EBROOTLIBXC/lib -lxcf90 -lxc" ' - -# wannier90 variant -configopts += '--with-wannier90-bins="$EBROOTWANNIER90/bin" ' -configopts += '--with-wannier90-incs="-I$EBROOTWANNIER90/include" ' -configopts += '--with-wannier90-libs="-L$EBROOTWANNIER90/lib -lwannier90" ' - -# atompaw variant -configopts += '--with-atompaw-bins="$EBROOTATOMPAW/bin" ' -configopts += '--with-atompaw-incs="-I$EBROOTATOMPAW/include" ' -configopts += '--with-atompaw-libs="-L$EBROOTATOMPAW/lib -latompaw" ' - -# trio flavor -configopts += '--with-trio-flavor=netcdf ' - -# netCDF support -configopts += '--with-netcdf-incs="-I$EBROOTNETCDF/include -I$EBROOTNETCDFMINFORTRAN/include" ' -configopts += '--with-netcdf-libs="-L$EBROOTNETCDF/lib64 -lnetcdf -L$EBROOTNETCDFMINFORTRAN/lib -lnetcdff" ' - -# Enable double precision for GW calculations -configopts += '--enable-gw-dpc ' - -# Enable OpenMP -configopts += '--enable-openmp ' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['abinit', 'aim', 'cut3d', 'conducti', 'mrgddb', 'mrgscr', 'optic']], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ABINIT/ABINIT-8.10.3-intel-para-2020.eb b/Golden_Repo/a/ABINIT/ABINIT-8.10.3-intel-para-2020.eb deleted file mode 100644 index 
c2fe2a33347865703937e4eb2adc68be0e8d2a9d..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ABINIT/ABINIT-8.10.3-intel-para-2020.eb +++ /dev/null @@ -1,80 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ABINIT' -version = '8.10.3' - -homepage = 'https://www.abinit.org/' -description = """ -ABINIT is a package whose main program allows one to find the total energy, charge density and electronic structure of -systems made of electrons and nuclei (molecules and periodic solids) within Density Functional Theory (DFT), using -pseudopotentials and a planewave or wavelet basis. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': True, 'pic': True} - -source_urls = ['https://www.abinit.org/sites/default/files/packages/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['ed626424b4472b93256622fbb9c7645fa3ffb693d4b444b07d488771ea7eaa75'] - -## -# AtomPAW-4.1.0.5-*-2020.eb is lastest version to be used with ABINIT 8.10.x -## -dependencies = [ - ('libxc', '3.0.1'), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('HDF5', '1.10.6'), - ('Wannier90', '2.0.1.1', '-abinit'), - ('AtomPAW', '4.1.0.5'), -] - -preconfigopts = 'export FCFLAGS="-ffree-line-length-none $FCFLAGS" && ' - -# ensure mpi and intel toolchain -configopts = '--enable-mpi ' - -# linalg & fft -configopts += '--with-linalg-flavor=mkl ' -configopts += '--with-fft-flavor=dfti --with-fft-libs="$LIBFFT" ' - -# dft flavor -configopts += '--with-dft-flavor=libxc+wannier90+atompaw ' - -# libXC variant -configopts += '--with-libxc-incs="-I$EBROOTLIBXC/include" ' -configopts += '--with-libxc-libs="-L$EBROOTLIBXC/lib -lxcf90 -lxc" ' - -# wannier90 variant -configopts += '--with-wannier90-bins="$EBROOTWANNIER90/bin" ' -configopts += '--with-wannier90-incs="-I$EBROOTWANNIER90/include" ' -configopts += '--with-wannier90-libs="-L$EBROOTWANNIER90/lib -lwannier90" ' - -# atompaw variant -configopts += '--with-atompaw-bins="$EBROOTATOMPAW/bin" ' -configopts += '--with-atompaw-incs="-I$EBROOTATOMPAW/include" ' -configopts += '--with-atompaw-libs="-L$EBROOTATOMPAW/lib -latompaw" ' - -# trio flavor -configopts += '--with-trio-flavor=netcdf ' - -# netCDF support -configopts += '--with-netcdf-incs="-I$EBROOTNETCDF/include -I$EBROOTNETCDFMINFORTRAN/include" ' -configopts += '--with-netcdf-libs="-L$EBROOTNETCDF/lib64 -lnetcdf -L$EBROOTNETCDFMINFORTRAN/lib -lnetcdff" ' - -# Enable double precision for GW calculations -configopts += '--enable-gw-dpc ' - -# Enable OpenMP -configopts += '--enable-openmp ' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['abinit', 'aim', 'cut3d', 'conducti', 'mrgddb', 'mrgscr', 'optic']], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ABINIT/ABINIT-9.4.2-gpsmkl-2021.eb b/Golden_Repo/a/ABINIT/ABINIT-9.4.2-gpsmkl-2021.eb deleted file mode 100644 index 537ee53e6b56f40eb80969306d6ec0709103e327..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ABINIT/ABINIT-9.4.2-gpsmkl-2021.eb +++ /dev/null @@ -1,75 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ABINIT' -version = '9.4.2' - -homepage = 'https://www.abinit.org/' -description = """ -ABINIT is a package whose main program allows one to find the total energy, charge density and electronic structure of -systems made of electrons and nuclei (molecules and periodic solids) within Density Functional Theory (DFT), using -pseudopotentials and a planewave or wavelet basis. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'usempi': True, 'openmp': True, 'pic': True} - -source_urls = ['https://www.abinit.org/sites/default/files/packages/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['d40886f5c8b138bb4aa1ca05da23388eb70a682790cfe5020ecce4db1b1a76bc'] - -builddependencies = [ - ('Python', '3.8.5'), -] -dependencies = [ - ('libxc', '5.1.5'), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('HDF5', '1.10.6'), - ('Wannier90', '3.1.0'), -] - - -# Needed due to changes in GCC10. -preconfigopts = 'export FCFLAGS="-fallow-argument-mismatch -ffree-line-length-none $FCFLAGS" && ' -preconfigopts += 'export FFLAGS="-fallow-argument-mismatch -ffree-line-length-none $FFLAGS" && ' - -# Ensure MPI -configopts = '--with-mpi="yes" ' - -# Enable OpenMP -configopts += '--enable-openmp="yes" ' - -# BLAS/Lapack from MKL -configopts += '--with-linalg-flavor=mkl ' - -# FFTW from MKL -configopts += '--with-fft-flavor=dfti FFT_LIBS="$LIBFFT" ' - -# libxc support -configopts += '--with-libxc=${EBROOTLIBXC} ' - -# hdf5/netcdf4 support -configopts += '--with-netcdf="${EBROOTNETCDF}" ' -configopts += '--with-netcdf-fortran="${EBROOTNETCDFMINFORTRAN}" ' -configopts += '--with-hdf5="${EBROOTHDF5}" ' - -# Wannier90 -configopts += '--with-wannier90="${EBROOTWANNIER90}" ' -preconfigopts += 'export WANNIER90_LIBS="-L$EBROOTWANNIER90/lib -lwannier" && ' - -# Enable double precision for GW calculations -configopts += '--enable-gw-dpc ' - -# Enable OpenMP -configopts += '--enable-openmp ' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['abinit', 'aim', 'cut3d', 'conducti', 'mrgddb', 'mrgscr', 'optic']], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ABINIT/ABINIT-9.4.2-intel-para-2021.eb b/Golden_Repo/a/ABINIT/ABINIT-9.4.2-intel-para-2021.eb deleted file mode 100644 index 8028aa3254b972a1e8efc76cb859b0180b7b9514..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ABINIT/ABINIT-9.4.2-intel-para-2021.eb +++ /dev/null @@ -1,72 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ABINIT' -version = '9.4.2' - -homepage = 'https://www.abinit.org/' -description = """ -ABINIT is a package whose main program allows one to find the total energy, charge density and electronic structure of -systems made of electrons and nuclei (molecules and periodic solids) within Density Functional Theory (DFT), using -pseudopotentials and a planewave or wavelet basis. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'usempi': True, 'openmp': True, 'pic': True} - -source_urls = ['https://www.abinit.org/sites/default/files/packages/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['d40886f5c8b138bb4aa1ca05da23388eb70a682790cfe5020ecce4db1b1a76bc'] - -builddependencies = [ - ('Python', '3.8.5'), -] -dependencies = [ - ('libxc', '5.1.5'), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('HDF5', '1.10.6'), - ('Wannier90', '3.1.0'), -] - -preconfigopts = 'export FCFLAGS="-ffree-line-length-none $FCFLAGS" && ' - -# Ensure MPI -configopts = '--with-mpi="yes" ' - -# Enable OpenMP -configopts += '--enable-openmp="yes" ' - -# BLAS/Lapack from MKL -configopts += '--with-linalg-flavor=mkl ' - -# FFTW from MKL -configopts += '--with-fft-flavor=dfti FFT_LIBS="$LIBFFT" ' - -# libxc support -configopts += '--with-libxc=${EBROOTLIBXC} ' - -# hdf5/netcdf4 support -configopts += '--with-netcdf="${EBROOTNETCDF}" ' -configopts += '--with-netcdf-fortran="${EBROOTNETCDFMINFORTRAN}" ' -configopts += '--with-hdf5="${EBROOTHDF5}" ' - -# Wannier90 -configopts += '--with-wannier90="${EBROOTWANNIER90}" ' -preconfigopts += 'export WANNIER90_LIBS="-L$EBROOTWANNIER90/lib -lwannier" && ' - -# Enable double precision for GW calculations -configopts += '--enable-gw-dpc ' - -# Enable OpenMP -configopts += '--enable-openmp ' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['abinit', 'aim', 'cut3d', 'conducti', 'mrgddb', 'mrgscr', 'optic']], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ABINIT/abinit_8.10.3_m_polynomial_coeff.patch b/Golden_Repo/a/ABINIT/abinit_8.10.3_m_polynomial_coeff.patch deleted file mode 100644 index 191ca94428366109d3665208236721abf2cae47b..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ABINIT/abinit_8.10.3_m_polynomial_coeff.patch +++ /dev/null @@ -1,42 +0,0 @@ -Patch based on https://github.com/abinit/abinit/issues/24 - ---- abinit-8.10.3.orig/src/78_effpot/m_polynomial_coeff.F90 2019-06-24 07:00:10.000000000 +0200 -+++ abinit-8.10.3/src/78_effpot/m_polynomial_coeff.F90 2020-08-14 15:55:33.552846884 +0200 -@@ -2517,6 +2517,7 @@ - integer :: ia,ib,ii,icoeff1,icoeff_tmp - integer :: iterm,nbody_in,ncoeff_max,pa,pb - integer :: ndisp_max,nterm_max -+ integer :: icoeff1_temp - real(dp):: coefficient - logical :: need_compute,compatible,possible,need_anharmstr,need_spcoupling,need_distributed - !arrays -@@ -2639,8 +2640,9 @@ - end if!end if power_disp < power_disp_min - - if(compatible)then -+ icoeff1_temp = icoeff1 - call computeNorder(cell,coeffs_out,compatibleCoeffs,list_coeff,list_str,index_coeff,& --& icoeff1,icoeff_tot,natom,ncoeff,nstr,ncoeff_out,nrpt,nsym,power_disp+1,& -+& icoeff1_temp,icoeff_tot,natom,ncoeff,nstr,ncoeff_out,nrpt,nsym,power_disp+1,& - & power_disp_min,power_disp_max,symbols,nbody=nbody_in,compute=need_compute,& - & anharmstr=need_anharmstr,spcoupling=need_spcoupling) - end if -@@ -2770,6 +2772,7 @@ - !Local variables --------------------------------------- - !scalar - integer :: icoeff1,icoeff2,nbody_in,ii,jj -+ integer :: icoeff1_temp - logical :: need_compute,compatible,possible,need_anharmstr,need_spcoupling - logical :: need_only_odd_power,need_only_even_power - !arrays -@@ -2879,8 +2882,9 @@ - - ! If the model is still compatbile with the input flags, we continue. 
- if(compatible)then -+ icoeff1_temp = icoeff1 - call computeCombinationFromList(cell,compatibleCoeffs,list_coeff,list_str,& --& index_coeff,list_combination,icoeff1,max_power_strain,& -+& index_coeff,list_combination,icoeff1_temp,max_power_strain,& - & nmodel_tot,natom,ncoeff,nstr,nmodel,nrpt,nsym,power_disp+1,& - & power_disp_min,power_disp_max,symbols,nbody=nbody_in,& - & compute=need_compute,anharmstr=need_anharmstr,& diff --git a/Golden_Repo/a/ACTC/ACTC-1.1-GCCcore-10.3.0.eb b/Golden_Repo/a/ACTC/ACTC-1.1-GCCcore-10.3.0.eb deleted file mode 100644 index f8fe5a1fd5950e1ec437525aed6e20e6375ae5fb..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ACTC/ACTC-1.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'MakeCp' - -name = 'ACTC' -version = '1.1' - -homepage = 'https://sourceforge.net/projects/actc' -description = "ACTC converts independent triangles into triangle strips or fans." - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['3a1303291629b9de6008c3c9d7b020a4b854802408fb3f8222ec492808c8b44d'] - -builddependencies = [('binutils', '2.36.1')] - -buildopts = 'CC="$CC" CFLAGS="$CFLAGS"' - -files_to_copy = [ - (['tcsample', 'tctest', 'tctest2'], 'bin'), - (['tc.h'], 'include/ac'), - (['libactc.a'], 'lib'), - 'COPYRIGHT', 'manual.html', 'prims.gif', 'README', -] - -sanity_check_paths = { - 'files': ['bin/tctest', 'bin/tctest2', 'bin/tcsample', 'include/ac/tc.h', 'lib/libactc.a', - 'COPYRIGHT', 'manual.html', 'prims.gif', 'README'], - 'dirs': [], -} - -modextrapaths = {'CPATH': 'include/ac'} - -moduleclass = 'lib' diff --git a/Golden_Repo/a/ACTC/ACTC-1.1-GCCcore-9.3.0.eb b/Golden_Repo/a/ACTC/ACTC-1.1-GCCcore-9.3.0.eb deleted file mode 100644 index 0fadef8caee1892531f945a1893cf8de6c34ffea..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ACTC/ACTC-1.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'MakeCp' - -name = 'ACTC' -version = '1.1' - -homepage = 'https://sourceforge.net/projects/actc' -description = "ACTC converts independent triangles into triangle strips or fans." 
- -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['3a1303291629b9de6008c3c9d7b020a4b854802408fb3f8222ec492808c8b44d'] - -builddependencies = [('binutils', '2.34')] - -buildopts = 'CC="$CC" CFLAGS="$CFLAGS"' - -files_to_copy = [ - (['tcsample', 'tctest', 'tctest2'], 'bin'), - (['tc.h'], 'include/ac'), - (['libactc.a'], 'lib'), - 'COPYRIGHT', 'manual.html', 'prims.gif', 'README', -] - -sanity_check_paths = { - 'files': ['bin/tctest', 'bin/tctest2', 'bin/tcsample', 'include/ac/tc.h', 'lib/libactc.a', - 'COPYRIGHT', 'manual.html', 'prims.gif', 'README'], - 'dirs': [], -} - -modextrapaths = {'CPATH': 'include/ac'} - -moduleclass = 'lib' diff --git a/Golden_Repo/a/AMBER/AMBER-20-gpsmkl-2020.eb b/Golden_Repo/a/AMBER/AMBER-20-gpsmkl-2020.eb deleted file mode 100644 index 963bc7600c6680ede746291fc538d27a23d37126..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AMBER/AMBER-20-gpsmkl-2020.eb +++ /dev/null @@ -1,86 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'AMBER' -version = '20' - -homepage = 'http://ambermd.org' -description = """ -AMBER: 'Assisted Model Building with Energy Refinement' is a set of molecular -mechanics force fields and a package of molecular simulation programs. - -Citation: -D.A. Case, K. Belfon, I.Y. Ben-Shalom, S.R. Brozell, D.S. Cerutti, -T.E. Cheatham, III, V.W.D. Cruzeiro, T.A. Darden, R.E. Duke, G. Giambasu, -M.K. Gilson, H. Gohlke, A.W. Goetz, R. Harris, S. Izadi, S.A. Izmailov, -K. Kasavajhala, A. Kovalenko, R. Krasny, T. Kurtzman, T.S. Lee, S. LeGrand, -P. Li, C. Lin, J. Liu, T. Luchko, R. Luo, V. Man, K.M. Merz, Y. Miao, -O. Mikhailovskii, G. Monard, H. Nguyen, A. Onufriev, F.Pan, S. Pantano, -R. Qi, D.R. Roe, A. Roitberg, C. Sagui, S. Schott-Verdugo, J. Shen, -C. Simmerling, N.R.Skrynnikov, J. Smith, J. Swails, R.C. Walker, J. Wang, -L. Wilson, R.M. Wolf, X. Wu, Y. Xiong, Y. Xue, D.M. York -and P.A. Kollman (2020), -AMBER 2020, University of California, San Francisco. -""" - -site_contacts = 'Sandipan Mohanty <s.mohanty@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34',), -] -dependencies = [ - # ('CMake', '3.18.0'), - ('FFTW', '3.3.8'), - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', '-Python-%(pyver)s', ('gcccoremkl', '9.3.0-2020.2.254')), - ('Boost', '1.74.0', '-nompi'), - ('flex', '2.6.3'), - ('NCCL', '2.8.3-1', '-CUDA-11.0'), -] -# Something in the source is causing flex 2.6.4 to SEGFAULT. Keep it at 2.6.3 -# until it is fixed. 
/SM 2021-05-13 - -sources = [ - 'AmberTools20.tar.bz2', - 'Amber20.tar.bz2', -] - -separate_build_dir = True -local_build_mpi_parts = "TRUE" -local_build_cuda_parts = "TRUE" -local_build_cuda_nccl = "TRUE" - -preconfigopts = "CC=gcc && CXX=g++ && COMPILER=GNU " -preconfigopts += " && cd %(builddir)s/amber20_src && " -preconfigopts += " ./update_amber --update && cd ../easybuild_obj && " - -configopts = "-DCOMPILER=GNU -DCHECK_UPDATES=OFF -DAPPLY_UPDATES=OFF -DBUILD_GUI=FALSE " -configopts += " -DINSTALL_TESTS=TRUE -DMPI=%s " % local_build_mpi_parts -configopts += " -DDOWNLOAD_MINICONDA=FALSE -DTRUST_SYSTEM_LIBS=TRUE " -configopts += " -DCUDA=%s " % local_build_cuda_parts -configopts += " -DNCCL=%s " % local_build_cuda_nccl -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - -modextravars = { - 'AMBERHOME': '%(installdir)s/', -} - -modluafooter = ''' -add_property("arch","gpu") -''' - -group = "amber" - -modloadmsg = ''' -The access to this software is restricted to members of the group "amber". -The JSC has a site licence for academic users. If you would like to get -access please see: -"http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/Chemistry/AmberConfirmationOfLicence.html" -''' - -moduleclass = 'bio' diff --git a/Golden_Repo/a/ANTLR/ANTLR-2.7.7-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/a/ANTLR/ANTLR-2.7.7-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index d631fd2fe97db6095484bb42b31ee86cc3efd78c..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ANTLR/ANTLR-2.7.7-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ANTLR' -version = "2.7.7" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'http://www.antlr2.org/' -description = """ANTLR, ANother Tool for Language Recognition, (formerly PCCTS) - is a language tool that provides a framework for constructing recognizers, - compilers, and translators from grammatical descriptions containing - Java, C#, C++, or Python actions. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://www.antlr2.org/download/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = ['%(name)s-%(version)s_includes.patch'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Java', '15', '', SYSTEM), - ('Python', '3.8.5'), -] - -configopts = '--disable-examples --disable-csharp ' - -sanity_check_paths = { - 'files': ['bin/antlr', 'bin/antlr-config'], - 'dirs': ['include'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/a/ANTLR/ANTLR-2.7.7-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/a/ANTLR/ANTLR-2.7.7-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index ee3ab68467797446a6bae0c3278ff2f2c11bbc0a..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ANTLR/ANTLR-2.7.7-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ANTLR' -version = "2.7.7" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'http://www.antlr2.org/' -description = """ANTLR, ANother Tool for Language Recognition, (formerly PCCTS) - is a language tool that provides a framework for constructing recognizers, - compilers, and translators from grammatical descriptions containing - Java, C#, C++, or Python actions. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://www.antlr2.org/download/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = ['%(name)s-%(version)s_includes.patch'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Java', '1.8', '', SYSTEM), - ('Python', '3.8.5'), -] - -configopts = '--disable-examples --disable-csharp ' - -sanity_check_paths = { - 'files': ['bin/antlr', 'bin/antlr-config'], - 'dirs': ['include'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/a/AOCC/AOCC-2.3.0-GCCcore-9.3.0.eb b/Golden_Repo/a/AOCC/AOCC-2.3.0-GCCcore-9.3.0.eb deleted file mode 100644 index c61cada37fd429d4bf6ddd1b72bdc4c11bb418ab..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AOCC/AOCC-2.3.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'AOCC' -version = '2.3.0' - -homepage = 'https://developer.amd.com/amd-aocc/' -description = "AMD Optimized C/C++ & Fortran compilers (AOCC) based on LLVM 11.0" - -# Clang also depends on libstdc++ during runtime, but this dependency is -# already specified as the toolchain. -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://developer.amd.com/wordpress/media/files/'] -sources = ['aocc-compiler-%(version)s.tar'] -checksums = [ - # aocc-compiler-2.3.0.tar - '9f8a1544a5268a7fb8cd21ac4bdb3f8d1571949d1de5ca48e2d3309928fc3d15', -] - -dependencies = [ - ('binutils', '2.34'), - ('ncurses', '6.2'), - ('zlib', '1.2.11'), - ('libxml2', '2.9.10'), -] - -moduleclass = 'devel' diff --git a/Golden_Repo/a/AOCC/AOCC-3.0.0-GCCcore-10.3.0.eb b/Golden_Repo/a/AOCC/AOCC-3.0.0-GCCcore-10.3.0.eb deleted file mode 100644 index 330f37add2544f963f5da3caf268fe9b9c30f3e8..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AOCC/AOCC-3.0.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'AOCC' -version = '3.0.0' - -homepage = 'https://developer.amd.com/amd-aocc/' -description = "AMD Optimized C/C++ & Fortran compilers (AOCC) based on LLVM 12.0" - -# Clang also depends on libstdc++ during runtime, but this dependency is -# already specified as the toolchain. -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://developer.amd.com/wordpress/media/files/'] -sources = ['aocc-compiler-%(version)s.tar'] -checksums = [ - # aocc-compiler-3.0.0.tar - '4ff269b1693856b9920f57e3c85ce488c8b81123ddc88682a3ff283979362227', -] - -dependencies = [ - ('binutils', '2.36.1'), - ('ncurses', '6.2'), - ('zlib', '1.2.11'), - ('libxml2', '2.9.10'), -] - -moduleclass = 'devel' diff --git a/Golden_Repo/a/APR-util/APR-util-1.6.1-GCCcore-10.3.0.eb b/Golden_Repo/a/APR-util/APR-util-1.6.1-GCCcore-10.3.0.eb deleted file mode 100644 index 2ef1065a376a352e2ae759eb2485592d3c465173..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/APR-util/APR-util-1.6.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'APR-util' -version = '1.6.1' - -homepage = 'http://apr.apache.org/' -description = """ -Apache Portable Runtime (APR) util libraries. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://archive.apache.org/dist/apr/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('APR', '1.7.0'), - ('SQLite', '3.32.3'), - ('expat', '2.2.9'), -] - -configopts = "--with-apr=$EBROOTAPR/bin/apr-1-config --with-sqlite3=$EBROOTSQLITE --with-expat=$EBROOTEXPAT " - -sanity_check_paths = { - 'files': ["bin/apu-1-config", "lib/libaprutil-1.%s" % SHLIB_EXT, "lib/libaprutil-1.a"], - 'dirs': ["include/apr-1"], -} - -parallel = 1 - -moduleclass = 'tools' diff --git a/Golden_Repo/a/APR-util/APR-util-1.6.1-GCCcore-9.3.0.eb b/Golden_Repo/a/APR-util/APR-util-1.6.1-GCCcore-9.3.0.eb deleted file mode 100644 index 7be6ae0261bee5507ae1889399f15ea076a1b7fc..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/APR-util/APR-util-1.6.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'APR-util' -version = '1.6.1' - -homepage = 'http://apr.apache.org/' -description = """ -Apache Portable Runtime (APR) util libraries. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://archive.apache.org/dist/apr/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('APR', '1.7.0'), - ('SQLite', '3.32.3'), - ('expat', '2.2.9'), -] - -configopts = "--with-apr=$EBROOTAPR/bin/apr-1-config --with-sqlite3=$EBROOTSQLITE --with-expat=$EBROOTEXPAT " - -sanity_check_paths = { - 'files': ["bin/apu-1-config", "lib/libaprutil-1.%s" % SHLIB_EXT, "lib/libaprutil-1.a"], - 'dirs': ["include/apr-1"], -} - -parallel = 1 - -moduleclass = 'tools' diff --git a/Golden_Repo/a/APR/APR-1.7.0-GCCcore-10.3.0.eb b/Golden_Repo/a/APR/APR-1.7.0-GCCcore-10.3.0.eb deleted file mode 100644 index 4932f0300ee47c1032ad2825a2d27f01fe265c37..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/APR/APR-1.7.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'APR' -version = '1.7.0' - -homepage = 'http://apr.apache.org/' -description = """ -Apache Portable Runtime (APR) libraries. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://archive.apache.org/dist/apr/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1') -] - -sanity_check_paths = { - 'files': ["bin/apr-1-config", "lib/libapr-1.%s" % SHLIB_EXT, "lib/libapr-1.a"], - 'dirs': ["include/apr-1"], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/a/APR/APR-1.7.0-GCCcore-9.3.0.eb b/Golden_Repo/a/APR/APR-1.7.0-GCCcore-9.3.0.eb deleted file mode 100644 index 910396571c8be0668f6cce1fd77e15d6ab354ba8..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/APR/APR-1.7.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'APR' -version = '1.7.0' - -homepage = 'http://apr.apache.org/' -description = """ -Apache Portable Runtime (APR) libraries. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://archive.apache.org/dist/apr/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34') -] - -sanity_check_paths = { - 'files': ["bin/apr-1-config", "lib/libapr-1.%s" % SHLIB_EXT, "lib/libapr-1.a"], - 'dirs': ["include/apr-1"], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gcccoremkl-9.3.0-2020.2.254-nompi.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gcccoremkl-9.3.0-2020.2.254-nompi.eb deleted file mode 100644 index e917f93387fa7c3738b0b532a9f7e2b4d700ce70..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gcccoremkl-9.3.0-2020.2.254-nompi.eb +++ /dev/null @@ -1,56 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.7.0' -versionsuffix = '-nompi' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] -checksums = ['972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6'] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples_gpsmkl.patch' -] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -# We hide it since this should be used just for Jupyter and the MPI version should be preferred for normal cases -hidden = True - -preconfigopts = 'sh bootstrap &&' -configopts = '--with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, ], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gomkl-2020.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gomkl-2020.eb deleted file mode 100644 index e0526a943f35127c60195c3d91fda1db572b0288..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gomkl-2020.eb +++ /dev/null @@ -1,54 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.7.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] -checksums = ['972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6'] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples_gpsmkl.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gpsmkl-2020.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gpsmkl-2020.eb deleted file mode 100644 index c2579e8029c4126b5f7a8cdfeac2abb233e0db24..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-gpsmkl-2020.eb +++ /dev/null @@ -1,54 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.7.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] -checksums = ['972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6'] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples_gpsmkl.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-install-arpack-examples.patch b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-install-arpack-examples.patch deleted file mode 100644 index 50d45acd586a1fe8a929c5bb8121403b0f05f460..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-install-arpack-examples.patch +++ /dev/null @@ -1,541 +0,0 @@ ---- arpack-ng-3.7.0/PARPACK_CHANGES 2019-01-12 16:24:12.000000000 +0100 -+++ arpack-ng-3.7.0-ok/PARPACK_CHANGES 2019-04-10 12:38:51.000000000 +0200 -@@ -332,6 +332,8 @@ - 35. 10/24/2003 p[c,z]naup2.f contain some lines that were incorrectly - generated by cpp. These lines have been removed in the new patch. - 36. 09/18/2016 p*apps.f and p*aitr.f contain condition to fetch machine epsilon. -- When different p*aupd call use communicator with different number of CPU these -+ When different p*aupd call use communicator with -+ different number of CPU these - conditions cause deadlock. Variables inside conditions are moved to global -- to be reset each first iteration -\ Kein Zeilenumbruch am Dateiende. -+ to be reset each first iteration -+ ---- arpack-ng-3.7.0/EXAMPLES/BAND/Makefile_band_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/BAND/Makefile_band_intel 2019-04-12 08:14:53.000000000 +0200 -@@ -0,0 +1,159 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnbdr1_OBJECTS = cnbdr1.o cnband.o -+cnbdr2_OBJECTS = cnbdr2.o cnband.o -+cnbdr3_OBJECTS = cnbdr3.o cnband.o -+cnbdr4_OBJECTS = cnbdr4.o cnband.o -+dnbdr1_OBJECTS = dnbdr1.o dnband.o -+dnbdr2_OBJECTS = dnbdr2.o dnband.o -+dnbdr3_OBJECTS = dnbdr3.o dnband.o -+dnbdr4_OBJECTS = dnbdr4.o dnband.o -+dnbdr5_OBJECTS = dnbdr5.o dnband.o -+dnbdr6_OBJECTS = dnbdr6.o dnband.o -+dsbdr1_OBJECTS = dsbdr1.o dsband.o -+dsbdr2_OBJECTS = dsbdr2.o dsband.o -+dsbdr3_OBJECTS = dsbdr3.o dsband.o -+dsbdr4_OBJECTS = dsbdr4.o dsband.o -+dsbdr5_OBJECTS = dsbdr5.o dsband.o -+dsbdr6_OBJECTS = dsbdr6.o dsband.o -+snbdr1_OBJECTS = snbdr1.o snband.o -+snbdr2_OBJECTS = snbdr2.o snband.o -+snbdr3_OBJECTS = snbdr3.o snband.o -+snbdr4_OBJECTS = snbdr4.o snband.o -+snbdr5_OBJECTS = snbdr5.o snband.o -+snbdr6_OBJECTS = snbdr6.o snband.o -+ssbdr1_OBJECTS = ssbdr1.o ssband.o -+ssbdr2_OBJECTS = ssbdr2.o ssband.o -+ssbdr3_OBJECTS = ssbdr3.o ssband.o -+ssbdr4_OBJECTS = ssbdr4.o ssband.o -+ssbdr5_OBJECTS = ssbdr5.o ssband.o -+ssbdr6_OBJECTS = ssbdr6.o ssband.o -+znbdr1_OBJECTS = znbdr1.o znband.o -+znbdr2_OBJECTS = znbdr2.o znband.o -+znbdr3_OBJECTS = znbdr3.o znband.o -+znbdr4_OBJECTS = znbdr4.o znband.o -+ -+all : cnbdr1 cnbdr2 cnbdr3 cnbdr4 \ -+ dnbdr1 dnbdr2 dnbdr3 dnbdr4 dnbdr5 dnbdr6 \ -+ dsbdr1 dsbdr2 dsbdr3 dsbdr4 dsbdr5 dsbdr6 \ -+ snbdr1 snbdr2 snbdr3 snbdr4 snbdr5 snbdr6 \ -+ ssbdr1 ssbdr2 ssbdr3 ssbdr4 ssbdr5 ssbdr6 \ -+ znbdr1 znbdr2 znbdr3 znbdr4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnbdr1 : $(cnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr2 : $(cnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr3 : $(cnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr4 : $(cnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr1 : $(dnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr2 : $(dnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr3 : $(dnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr4 : $(dnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr5 : $(dnbdr5_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr6 : $(dnbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr1 : $(dsbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr2 : $(dsbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr3 : $(dsbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr4 : $(dsbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr5 : $(dsbdr5_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr6 : $(dsbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr1 : $(snbdr1_OBJECTS) -+ $(F77LD) -o $@ $(snbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr2 : $(snbdr2_OBJECTS) -+ $(F77LD) -o $@ $(snbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr3 : $(snbdr3_OBJECTS) -+ $(F77LD) -o $@ 
$(snbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr4 : $(snbdr4_OBJECTS) -+ $(F77LD) -o $@ $(snbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr5 : $(snbdr5_OBJECTS) -+ $(F77LD) -o $@ $(snbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr6 : $(snbdr6_OBJECTS) -+ $(F77LD) -o $@ $(snbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr1 : $(ssbdr1_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr2 : $(ssbdr2_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr3 : $(ssbdr3_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr4 : $(ssbdr4_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr5 : $(ssbdr5_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr6 : $(ssbdr6_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr1 : $(znbdr1_OBJECTS) -+ $(F77LD) -o $@ $(znbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr2 : $(znbdr2_OBJECTS) -+ $(F77LD) -o $@ $(znbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr3 : $(znbdr3_OBJECTS) -+ $(F77LD) -o $@ $(znbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr4 : $(znbdr4_OBJECTS) -+ $(F77LD) -o $@ $(znbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/COMPLEX/Makefile_complex_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/COMPLEX/Makefile_complex_intel 2019-04-10 12:32:17.000000000 +0200 -@@ -0,0 +1,59 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cndrv1_OBJECTS = cndrv1.o -+cndrv2_OBJECTS = cndrv2.o -+cndrv3_OBJECTS = cndrv3.o -+cndrv4_OBJECTS = cndrv4.o -+zndrv1_OBJECTS = zndrv1.o -+zndrv2_OBJECTS = zndrv2.o -+zndrv3_OBJECTS = zndrv3.o -+zndrv4_OBJECTS = zndrv4.o -+ -+all : cndrv1 cndrv2 cndrv3 cndrv4 \ -+ zndrv1 zndrv2 zndrv3 zndrv4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cndrv1 : $(cndrv1_OBJECTS) -+ $(F77LD) -o $@ $(cndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv2 : $(cndrv2_OBJECTS) -+ $(F77LD) -o $@ $(cndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv3 : $(cndrv3_OBJECTS) -+ $(F77LD) -o $@ $(cndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv4 : $(cndrv4_OBJECTS) -+ $(F77LD) -o $@ $(cndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv1 : $(zndrv1_OBJECTS) -+ $(F77LD) -o $@ $(zndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv2 : $(zndrv2_OBJECTS) -+ $(F77LD) -o $@ $(zndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv3 : $(zndrv3_OBJECTS) -+ $(F77LD) -o $@ $(zndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv4 : $(zndrv4_OBJECTS) -+ $(F77LD) -o $@ $(zndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/NONSYM/Makefile_nonsym_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/NONSYM/Makefile_nonsym_intel 2019-04-10 12:32:17.000000000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dndrv1_OBJECTS = dndrv1.o -+dndrv2_OBJECTS = dndrv2.o -+dndrv3_OBJECTS = dndrv3.o -+dndrv4_OBJECTS = dndrv4.o -+dndrv5_OBJECTS = dndrv5.o -+dndrv6_OBJECTS = dndrv6.o -+sndrv1_OBJECTS = sndrv1.o -+sndrv2_OBJECTS = sndrv2.o -+sndrv3_OBJECTS = sndrv3.o -+sndrv4_OBJECTS = sndrv4.o -+sndrv5_OBJECTS = sndrv5.o -+sndrv6_OBJECTS = sndrv6.o -+ -+all : dndrv1 dndrv2 dndrv3 dndrv4 dndrv5 dndrv6 \ -+ sndrv1 sndrv2 sndrv3 sndrv4 sndrv5 sndrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dndrv1 : $(dndrv1_OBJECTS) -+ $(F77LD) -o $@ $(dndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv2 : $(dndrv2_OBJECTS) -+ $(F77LD) -o $@ $(dndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv3 : $(dndrv3_OBJECTS) -+ $(F77LD) -o $@ $(dndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv4 : $(dndrv4_OBJECTS) -+ $(F77LD) -o $@ $(dndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv5 : $(dndrv5_OBJECTS) -+ $(F77LD) -o $@ $(dndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv6 : $(dndrv6_OBJECTS) -+ $(F77LD) -o $@ $(dndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv1 : $(sndrv1_OBJECTS) -+ $(F77LD) -o $@ $(sndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv2 : $(sndrv2_OBJECTS) -+ $(F77LD) -o $@ $(sndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv3 : $(sndrv3_OBJECTS) -+ $(F77LD) -o $@ $(sndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv4 : $(sndrv4_OBJECTS) -+ $(F77LD) -o $@ $(sndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv5 : $(sndrv5_OBJECTS) -+ $(F77LD) -o $@ $(sndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv6 : $(sndrv6_OBJECTS) -+ $(F77LD) -o $@ $(sndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/PARPACK/EXAMPLES/MPI/Makefile_parpack_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/PARPACK/EXAMPLES/MPI/Makefile_parpack_intel 2019-04-10 12:32:17.000000000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+PARPACKLIB = -L$(ARPACK_LIB) -lparpack -larpack -+ -+pcndrv1_OBJECTS = pcndrv1.o -+pdndrv1_OBJECTS = pdndrv1.o -+pdndrv3_OBJECTS = pdndrv3.o -+psndrv1_OBJECTS = psndrv1.o -+psndrv3_OBJECTS = psndrv3.o -+pzndrv1_OBJECTS = pzndrv1.o -+ -+all : pcndrv1 pdndrv1 pdndrv3 \ -+ psndrv1 psndrv3 pzndrv1 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+pcndrv1 : $(pcndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pcndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv1 : $(pdndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv3 : $(pdndrv3_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv1 : $(psndrv1_OBJECTS) -+ $(F77LD) -o $@ $(psndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv3 : $(psndrv3_OBJECTS) -+ $(F77LD) -o $@ $(psndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pzndrv1 : $(pzndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pzndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/SIMPLE/Makefile_simple_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/SIMPLE/Makefile_simple_intel 2019-04-11 16:07:26.000000000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnsimp_OBJECTS = cnsimp.o -+dnsimp_OBJECTS = dnsimp.o -+dssimp_OBJECTS = dssimp.o -+snsimp_OBJECTS = snsimp.o -+sssimp_OBJECTS = sssimp.o -+znsimp_OBJECTS = znsimp.o -+ -+all : cnsimp dnsimp dssimp \ -+ snsimp sssimp znsimp -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnsimp : $(cnsimp_OBJECTS) -+ $(F77LD) -o $@ $(cnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnsimp : $(dnsimp_OBJECTS) -+ $(F77LD) -o $@ $(dnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dssimp : $(dssimp_OBJECTS) -+ $(F77LD) -o $@ $(dssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snsimp : $(snsimp_OBJECTS) -+ $(F77LD) -o $@ $(snsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sssimp : $(sssimp_OBJECTS) -+ $(F77LD) -o $@ $(sssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znsimp : $(znsimp_OBJECTS) -+ $(F77LD) -o $@ $(znsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/SVD/Makefile_svd_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/SVD/Makefile_svd_intel 2019-04-11 16:13:20.000000000 +0200 -@@ -0,0 +1,36 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsvd_OBJECTS = dsvd.o -+ssvd_OBJECTS = ssvd.o -+ -+all : dsvd \ -+ ssvd -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+ -+dsvd : $(dsvd_OBJECTS) -+ $(F77LD) -o $@ $(dsvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssvd : $(ssvd_OBJECTS) -+ $(F77LD) -o $@ $(ssvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/SYM/Makefile_sym_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/SYM/Makefile_sym_intel 2019-04-10 12:32:17.000000000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsdrv1_OBJECTS = dsdrv1.o -+dsdrv2_OBJECTS = dsdrv2.o -+dsdrv3_OBJECTS = dsdrv3.o -+dsdrv4_OBJECTS = dsdrv4.o -+dsdrv5_OBJECTS = dsdrv5.o -+dsdrv6_OBJECTS = dsdrv6.o -+ssdrv1_OBJECTS = ssdrv1.o -+ssdrv2_OBJECTS = ssdrv2.o -+ssdrv3_OBJECTS = ssdrv3.o -+ssdrv4_OBJECTS = ssdrv4.o -+ssdrv5_OBJECTS = ssdrv5.o -+ssdrv6_OBJECTS = ssdrv6.o -+ -+all : dsdrv1 dsdrv2 dsdrv3 dsdrv4 dsdrv5 dsdrv6 \ -+ ssdrv1 ssdrv2 ssdrv3 ssdrv4 ssdrv5 ssdrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dsdrv1 : $(dsdrv1_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv2 : $(dsdrv2_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv3 : $(dsdrv3_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv4 : $(dsdrv4_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv5 : $(dsdrv5_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv6 : $(dsdrv6_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv1 : $(ssdrv1_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv2 : $(ssdrv2_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv3 : $(ssdrv3_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv4 : $(ssdrv4_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv5 : $(ssdrv5_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv6 : $(ssdrv6_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-install-arpack-examples_gpsmkl.patch b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-install-arpack-examples_gpsmkl.patch deleted file mode 100644 index aa0c68e86759dc8d815e64f3d52effa6fb2c3c82..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-install-arpack-examples_gpsmkl.patch +++ /dev/null @@ -1,541 +0,0 @@ ---- arpack-ng-3.7.0/PARPACK_CHANGES 2019-01-12 16:24:12.000000000 +0100 -+++ arpack-ng-3.7.0-ok/PARPACK_CHANGES 2019-04-10 12:38:51.000000000 +0200 -@@ -332,6 +332,8 @@ - 35. 
10/24/2003 p[c,z]naup2.f contain some lines that were incorrectly - generated by cpp. These lines have been removed in the new patch. - 36. 09/18/2016 p*apps.f and p*aitr.f contain condition to fetch machine epsilon. -- When different p*aupd call use communicator with different number of CPU these -+ When different p*aupd call use communicator with -+ different number of CPU these - conditions cause deadlock. Variables inside conditions are moved to global -- to be reset each first iteration -\ Kein Zeilenumbruch am Dateiende. -+ to be reset each first iteration -+ ---- arpack-ng-3.7.0/EXAMPLES/BAND/Makefile_band 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/BAND/Makefile_band 2020-08-21 10:03:31.647089000 +0200 -@@ -0,0 +1,159 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnbdr1_OBJECTS = cnbdr1.o cnband.o -+cnbdr2_OBJECTS = cnbdr2.o cnband.o -+cnbdr3_OBJECTS = cnbdr3.o cnband.o -+cnbdr4_OBJECTS = cnbdr4.o cnband.o -+dnbdr1_OBJECTS = dnbdr1.o dnband.o -+dnbdr2_OBJECTS = dnbdr2.o dnband.o -+dnbdr3_OBJECTS = dnbdr3.o dnband.o -+dnbdr4_OBJECTS = dnbdr4.o dnband.o -+dnbdr5_OBJECTS = dnbdr5.o dnband.o -+dnbdr6_OBJECTS = dnbdr6.o dnband.o -+dsbdr1_OBJECTS = dsbdr1.o dsband.o -+dsbdr2_OBJECTS = dsbdr2.o dsband.o -+dsbdr3_OBJECTS = dsbdr3.o dsband.o -+dsbdr4_OBJECTS = dsbdr4.o dsband.o -+dsbdr5_OBJECTS = dsbdr5.o dsband.o -+dsbdr6_OBJECTS = dsbdr6.o dsband.o -+snbdr1_OBJECTS = snbdr1.o snband.o -+snbdr2_OBJECTS = snbdr2.o snband.o -+snbdr3_OBJECTS = snbdr3.o snband.o -+snbdr4_OBJECTS = snbdr4.o snband.o -+snbdr5_OBJECTS = snbdr5.o snband.o -+snbdr6_OBJECTS = snbdr6.o snband.o -+ssbdr1_OBJECTS = ssbdr1.o ssband.o -+ssbdr2_OBJECTS = ssbdr2.o ssband.o -+ssbdr3_OBJECTS = ssbdr3.o ssband.o -+ssbdr4_OBJECTS = ssbdr4.o ssband.o -+ssbdr5_OBJECTS = ssbdr5.o ssband.o -+ssbdr6_OBJECTS = ssbdr6.o ssband.o -+znbdr1_OBJECTS = znbdr1.o znband.o -+znbdr2_OBJECTS = znbdr2.o znband.o -+znbdr3_OBJECTS = znbdr3.o znband.o -+znbdr4_OBJECTS = znbdr4.o znband.o -+ -+all : cnbdr1 cnbdr2 cnbdr3 cnbdr4 \ -+ dnbdr1 dnbdr2 dnbdr3 dnbdr4 dnbdr5 dnbdr6 \ -+ dsbdr1 dsbdr2 dsbdr3 dsbdr4 dsbdr5 dsbdr6 \ -+ snbdr1 snbdr2 snbdr3 snbdr4 snbdr5 snbdr6 \ -+ ssbdr1 ssbdr2 ssbdr3 ssbdr4 ssbdr5 ssbdr6 \ -+ znbdr1 znbdr2 znbdr3 znbdr4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnbdr1 : $(cnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr2 : $(cnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr3 : $(cnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr4 : $(cnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr1 : $(dnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr2 : $(dnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr3 : $(dnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr4 : $(dnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr5 : $(dnbdr5_OBJECTS) -+ $(F77LD) -o $@ 
$(dnbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr6 : $(dnbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr1 : $(dsbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr2 : $(dsbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr3 : $(dsbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr4 : $(dsbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr5 : $(dsbdr5_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr6 : $(dsbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr1 : $(snbdr1_OBJECTS) -+ $(F77LD) -o $@ $(snbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr2 : $(snbdr2_OBJECTS) -+ $(F77LD) -o $@ $(snbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr3 : $(snbdr3_OBJECTS) -+ $(F77LD) -o $@ $(snbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr4 : $(snbdr4_OBJECTS) -+ $(F77LD) -o $@ $(snbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr5 : $(snbdr5_OBJECTS) -+ $(F77LD) -o $@ $(snbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr6 : $(snbdr6_OBJECTS) -+ $(F77LD) -o $@ $(snbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr1 : $(ssbdr1_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr2 : $(ssbdr2_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr3 : $(ssbdr3_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr4 : $(ssbdr4_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr5 : $(ssbdr5_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr6 : $(ssbdr6_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr1 : $(znbdr1_OBJECTS) -+ $(F77LD) -o $@ $(znbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr2 : $(znbdr2_OBJECTS) -+ $(F77LD) -o $@ $(znbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr3 : $(znbdr3_OBJECTS) -+ $(F77LD) -o $@ $(znbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr4 : $(znbdr4_OBJECTS) -+ $(F77LD) -o $@ $(znbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/COMPLEX/Makefile_complex 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/COMPLEX/Makefile_complex 2020-08-21 10:03:50.732460000 +0200 -@@ -0,0 +1,59 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cndrv1_OBJECTS = cndrv1.o -+cndrv2_OBJECTS = cndrv2.o -+cndrv3_OBJECTS = cndrv3.o -+cndrv4_OBJECTS = cndrv4.o -+zndrv1_OBJECTS = zndrv1.o -+zndrv2_OBJECTS = zndrv2.o -+zndrv3_OBJECTS = zndrv3.o -+zndrv4_OBJECTS = zndrv4.o -+ -+all : cndrv1 cndrv2 cndrv3 cndrv4 \ -+ zndrv1 zndrv2 zndrv3 zndrv4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cndrv1 : $(cndrv1_OBJECTS) -+ $(F77LD) -o $@ $(cndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv2 : $(cndrv2_OBJECTS) -+ $(F77LD) -o $@ $(cndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv3 : $(cndrv3_OBJECTS) -+ $(F77LD) -o $@ $(cndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv4 : $(cndrv4_OBJECTS) -+ $(F77LD) -o $@ $(cndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv1 : $(zndrv1_OBJECTS) -+ $(F77LD) -o $@ $(zndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv2 : $(zndrv2_OBJECTS) -+ $(F77LD) -o $@ $(zndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv3 : $(zndrv3_OBJECTS) -+ $(F77LD) -o $@ $(zndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv4 : $(zndrv4_OBJECTS) -+ $(F77LD) -o $@ $(zndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/NONSYM/Makefile_nonsym 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/NONSYM/Makefile_nonsym 2020-08-21 10:03:59.895397000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dndrv1_OBJECTS = dndrv1.o -+dndrv2_OBJECTS = dndrv2.o -+dndrv3_OBJECTS = dndrv3.o -+dndrv4_OBJECTS = dndrv4.o -+dndrv5_OBJECTS = dndrv5.o -+dndrv6_OBJECTS = dndrv6.o -+sndrv1_OBJECTS = sndrv1.o -+sndrv2_OBJECTS = sndrv2.o -+sndrv3_OBJECTS = sndrv3.o -+sndrv4_OBJECTS = sndrv4.o -+sndrv5_OBJECTS = sndrv5.o -+sndrv6_OBJECTS = sndrv6.o -+ -+all : dndrv1 dndrv2 dndrv3 dndrv4 dndrv5 dndrv6 \ -+ sndrv1 sndrv2 sndrv3 sndrv4 sndrv5 sndrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dndrv1 : $(dndrv1_OBJECTS) -+ $(F77LD) -o $@ $(dndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv2 : $(dndrv2_OBJECTS) -+ $(F77LD) -o $@ $(dndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv3 : $(dndrv3_OBJECTS) -+ $(F77LD) -o $@ $(dndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv4 : $(dndrv4_OBJECTS) -+ $(F77LD) -o $@ $(dndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv5 : $(dndrv5_OBJECTS) -+ $(F77LD) -o $@ $(dndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv6 : $(dndrv6_OBJECTS) -+ $(F77LD) -o $@ $(dndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv1 : $(sndrv1_OBJECTS) -+ $(F77LD) -o $@ $(sndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv2 : $(sndrv2_OBJECTS) -+ $(F77LD) -o $@ $(sndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv3 : $(sndrv3_OBJECTS) -+ $(F77LD) -o $@ $(sndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv4 : $(sndrv4_OBJECTS) -+ $(F77LD) -o $@ $(sndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv5 : $(sndrv5_OBJECTS) -+ $(F77LD) -o $@ $(sndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv6 : $(sndrv6_OBJECTS) -+ $(F77LD) -o $@ $(sndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/PARPACK/EXAMPLES/MPI/Makefile_parpack 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/PARPACK/EXAMPLES/MPI/Makefile_parpack 2020-08-21 10:04:09.985064000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+PARPACKLIB = -L$(ARPACK_LIB) -lparpack -larpack -+ -+pcndrv1_OBJECTS = pcndrv1.o -+pdndrv1_OBJECTS = pdndrv1.o -+pdndrv3_OBJECTS = pdndrv3.o -+psndrv1_OBJECTS = psndrv1.o -+psndrv3_OBJECTS = psndrv3.o -+pzndrv1_OBJECTS = pzndrv1.o -+ -+all : pcndrv1 pdndrv1 pdndrv3 \ -+ psndrv1 psndrv3 pzndrv1 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+pcndrv1 : $(pcndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pcndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv1 : $(pdndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv3 : $(pdndrv3_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv1 : $(psndrv1_OBJECTS) -+ $(F77LD) -o $@ $(psndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv3 : $(psndrv3_OBJECTS) -+ $(F77LD) -o $@ $(psndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pzndrv1 : $(pzndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pzndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/SIMPLE/Makefile_simple 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/SIMPLE/Makefile_simple 2020-08-21 10:04:20.152407000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnsimp_OBJECTS = cnsimp.o -+dnsimp_OBJECTS = dnsimp.o -+dssimp_OBJECTS = dssimp.o -+snsimp_OBJECTS = snsimp.o -+sssimp_OBJECTS = sssimp.o -+znsimp_OBJECTS = znsimp.o -+ -+all : cnsimp dnsimp dssimp \ -+ snsimp sssimp znsimp -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnsimp : $(cnsimp_OBJECTS) -+ $(F77LD) -o $@ $(cnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnsimp : $(dnsimp_OBJECTS) -+ $(F77LD) -o $@ $(dnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dssimp : $(dssimp_OBJECTS) -+ $(F77LD) -o $@ $(dssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snsimp : $(snsimp_OBJECTS) -+ $(F77LD) -o $@ $(snsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sssimp : $(sssimp_OBJECTS) -+ $(F77LD) -o $@ $(sssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znsimp : $(znsimp_OBJECTS) -+ $(F77LD) -o $@ $(znsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/SVD/Makefile_svd 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/SVD/Makefile_svd 2020-08-21 10:04:27.608296000 +0200 -@@ -0,0 +1,36 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsvd_OBJECTS = dsvd.o -+ssvd_OBJECTS = ssvd.o -+ -+all : dsvd \ -+ ssvd -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+ -+dsvd : $(dsvd_OBJECTS) -+ $(F77LD) -o $@ $(dsvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssvd : $(ssvd_OBJECTS) -+ $(F77LD) -o $@ $(ssvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.7.0/EXAMPLES/SYM/Makefile_sym 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.7.0-ok/EXAMPLES/SYM/Makefile_sym 2020-08-21 10:04:36.671251000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsdrv1_OBJECTS = dsdrv1.o -+dsdrv2_OBJECTS = dsdrv2.o -+dsdrv3_OBJECTS = dsdrv3.o -+dsdrv4_OBJECTS = dsdrv4.o -+dsdrv5_OBJECTS = dsdrv5.o -+dsdrv6_OBJECTS = dsdrv6.o -+ssdrv1_OBJECTS = ssdrv1.o -+ssdrv2_OBJECTS = ssdrv2.o -+ssdrv3_OBJECTS = ssdrv3.o -+ssdrv4_OBJECTS = ssdrv4.o -+ssdrv5_OBJECTS = ssdrv5.o -+ssdrv6_OBJECTS = ssdrv6.o -+ -+all : dsdrv1 dsdrv2 dsdrv3 dsdrv4 dsdrv5 dsdrv6 \ -+ ssdrv1 ssdrv2 ssdrv3 ssdrv4 ssdrv5 ssdrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dsdrv1 : $(dsdrv1_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv2 : $(dsdrv2_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv3 : $(dsdrv3_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv4 : $(dsdrv4_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv5 : $(dsdrv5_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv6 : $(dsdrv6_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv1 : $(ssdrv1_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv2 : $(ssdrv2_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv3 : $(ssdrv3_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv4 : $(ssdrv4_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv5 : $(ssdrv5_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv6 : $(ssdrv6_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-intel-2020.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-intel-2020.eb deleted file mode 100644 index 27c8d6b4adae0e39a9efff996caf0e9937a81e4f..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-intel-2020.eb +++ /dev/null @@ -1,54 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.7.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. 
- -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] -checksums = ['972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6'] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-intel-para-2020.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-intel-para-2020.eb deleted file mode 100644 index f86bb5277ff58e35678a360c5f2eab3d7d1ee2cb..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-intel-para-2020.eb +++ /dev/null @@ -1,54 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.7.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] -checksums = ['972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6'] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-iomkl-2020.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-iomkl-2020.eb deleted file mode 100644 index bc0c095071c458a69a7a180d0d9ffdcaeda7922d..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.7.0-iomkl-2020.eb +++ /dev/null @@ -1,54 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.7.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] -checksums = ['972e3fc3cd0b9d6b5a737c9bf6fd07515c0d6549319d4ffb06970e64fa3cc2d6'] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gcccoremkl-10.3.0-2021.2.0-nompi.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gcccoremkl-10.3.0-2021.2.0-nompi.eb deleted file mode 100644 index 9cfcb6e0d75a5b2df1782032a72e323d016e527f..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gcccoremkl-10.3.0-2021.2.0-nompi.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.8.0' -versionsuffix = '-nompi' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a has been installed in $EBROOTARPACKMINNG. - -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -# We hide it since this should be used just for Jupyter and the MPI version should be preferred for normal cases -hidden = True - -preconfigopts = 'sh bootstrap &&' -configopts = '--with-pic --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -sanity_check_paths = { - 'files': ["lib/libarpack.%s" % SHLIB_EXT, ], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gomkl-2021.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gomkl-2021.eb deleted file mode 100644 index fb6b8afb74922b2ab3a11d60a42bd8584fda93b3..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gomkl-2021.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.8.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. 
-""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/releases/tag/'] -sources = ["%(version)s.tar.gz"] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples_gpsmkl.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --enable-static --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gpsmkl-2021.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gpsmkl-2021.eb deleted file mode 100644 index 720ede5f6404df90527fc617786485a12cf492b0..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-gpsmkl-2021.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.8.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples_gpsmkl.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --enable-static --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-install-arpack-examples.patch b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-install-arpack-examples.patch deleted file mode 100644 index 8ec3b0762eea851e0e27c6c0f1c003a43ae8fefb..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-install-arpack-examples.patch +++ /dev/null @@ -1,538 +0,0 @@ ---- arpack-ng-3.8.0/PARPACK_CHANGES 2021-06-30 09:57:39.220219000 +0200 -+++ arpack-ng-3.8.0-ok/PARPACK_CHANGES 2021-06-30 10:25:30.148960874 +0200 -@@ -334,4 +334,6 @@ - 36. 09/18/2016 p*apps.f and p*aitr.f contain condition to fetch machine epsilon. - When different p*aupd call use communicator with different number of CPU these - conditions cause deadlock. Variables inside conditions are moved to global -- to be reset each first iteration -\ No newline at end of file -+ to be reset each first iteration -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/BAND/Makefile_band_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/BAND/Makefile_band_intel 2021-06-30 10:36:04.492823000 +0200 -@@ -0,0 +1,159 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnbdr1_OBJECTS = cnbdr1.o cnband.o -+cnbdr2_OBJECTS = cnbdr2.o cnband.o -+cnbdr3_OBJECTS = cnbdr3.o cnband.o -+cnbdr4_OBJECTS = cnbdr4.o cnband.o -+dnbdr1_OBJECTS = dnbdr1.o dnband.o -+dnbdr2_OBJECTS = dnbdr2.o dnband.o -+dnbdr3_OBJECTS = dnbdr3.o dnband.o -+dnbdr4_OBJECTS = dnbdr4.o dnband.o -+dnbdr5_OBJECTS = dnbdr5.o dnband.o -+dnbdr6_OBJECTS = dnbdr6.o dnband.o -+dsbdr1_OBJECTS = dsbdr1.o dsband.o -+dsbdr2_OBJECTS = dsbdr2.o dsband.o -+dsbdr3_OBJECTS = dsbdr3.o dsband.o -+dsbdr4_OBJECTS = dsbdr4.o dsband.o -+dsbdr5_OBJECTS = dsbdr5.o dsband.o -+dsbdr6_OBJECTS = dsbdr6.o dsband.o -+snbdr1_OBJECTS = snbdr1.o snband.o -+snbdr2_OBJECTS = snbdr2.o snband.o -+snbdr3_OBJECTS = snbdr3.o snband.o -+snbdr4_OBJECTS = snbdr4.o snband.o -+snbdr5_OBJECTS = snbdr5.o snband.o -+snbdr6_OBJECTS = snbdr6.o snband.o -+ssbdr1_OBJECTS = ssbdr1.o ssband.o -+ssbdr2_OBJECTS = ssbdr2.o ssband.o -+ssbdr3_OBJECTS = ssbdr3.o ssband.o -+ssbdr4_OBJECTS = ssbdr4.o ssband.o -+ssbdr5_OBJECTS = ssbdr5.o ssband.o -+ssbdr6_OBJECTS = ssbdr6.o ssband.o -+znbdr1_OBJECTS = znbdr1.o znband.o -+znbdr2_OBJECTS = znbdr2.o znband.o -+znbdr3_OBJECTS = znbdr3.o znband.o -+znbdr4_OBJECTS = znbdr4.o znband.o -+ -+all : cnbdr1 cnbdr2 cnbdr3 cnbdr4 \ -+ dnbdr1 dnbdr2 dnbdr3 dnbdr4 dnbdr5 dnbdr6 \ -+ dsbdr1 dsbdr2 dsbdr3 dsbdr4 dsbdr5 dsbdr6 \ -+ snbdr1 snbdr2 snbdr3 snbdr4 snbdr5 snbdr6 \ -+ ssbdr1 ssbdr2 ssbdr3 ssbdr4 ssbdr5 ssbdr6 \ -+ znbdr1 znbdr2 znbdr3 znbdr4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnbdr1 : $(cnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr2 : $(cnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr3 : $(cnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr4 : $(cnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr1 : $(dnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr2 : $(dnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr3 : $(dnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr4 : $(dnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr5 : $(dnbdr5_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr6 : $(dnbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr1 : $(dsbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr2 : $(dsbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr3 : $(dsbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr4 : $(dsbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr5 : $(dsbdr5_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr6 : $(dsbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr1 : $(snbdr1_OBJECTS) -+ $(F77LD) -o $@ $(snbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr2 : $(snbdr2_OBJECTS) -+ $(F77LD) -o $@ $(snbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr3 : $(snbdr3_OBJECTS) -+ $(F77LD) -o $@ 
$(snbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr4 : $(snbdr4_OBJECTS) -+ $(F77LD) -o $@ $(snbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr5 : $(snbdr5_OBJECTS) -+ $(F77LD) -o $@ $(snbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr6 : $(snbdr6_OBJECTS) -+ $(F77LD) -o $@ $(snbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr1 : $(ssbdr1_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr2 : $(ssbdr2_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr3 : $(ssbdr3_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr4 : $(ssbdr4_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr5 : $(ssbdr5_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr6 : $(ssbdr6_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr1 : $(znbdr1_OBJECTS) -+ $(F77LD) -o $@ $(znbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr2 : $(znbdr2_OBJECTS) -+ $(F77LD) -o $@ $(znbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr3 : $(znbdr3_OBJECTS) -+ $(F77LD) -o $@ $(znbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr4 : $(znbdr4_OBJECTS) -+ $(F77LD) -o $@ $(znbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/COMPLEX/Makefile_complex_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/COMPLEX/Makefile_complex_intel 2021-06-30 10:36:04.496200000 +0200 -@@ -0,0 +1,59 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cndrv1_OBJECTS = cndrv1.o -+cndrv2_OBJECTS = cndrv2.o -+cndrv3_OBJECTS = cndrv3.o -+cndrv4_OBJECTS = cndrv4.o -+zndrv1_OBJECTS = zndrv1.o -+zndrv2_OBJECTS = zndrv2.o -+zndrv3_OBJECTS = zndrv3.o -+zndrv4_OBJECTS = zndrv4.o -+ -+all : cndrv1 cndrv2 cndrv3 cndrv4 \ -+ zndrv1 zndrv2 zndrv3 zndrv4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cndrv1 : $(cndrv1_OBJECTS) -+ $(F77LD) -o $@ $(cndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv2 : $(cndrv2_OBJECTS) -+ $(F77LD) -o $@ $(cndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv3 : $(cndrv3_OBJECTS) -+ $(F77LD) -o $@ $(cndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv4 : $(cndrv4_OBJECTS) -+ $(F77LD) -o $@ $(cndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv1 : $(zndrv1_OBJECTS) -+ $(F77LD) -o $@ $(zndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv2 : $(zndrv2_OBJECTS) -+ $(F77LD) -o $@ $(zndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv3 : $(zndrv3_OBJECTS) -+ $(F77LD) -o $@ $(zndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv4 : $(zndrv4_OBJECTS) -+ $(F77LD) -o $@ $(zndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/NONSYM/Makefile_nonsym_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/NONSYM/Makefile_nonsym_intel 2021-06-30 10:36:04.539153000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dndrv1_OBJECTS = dndrv1.o -+dndrv2_OBJECTS = dndrv2.o -+dndrv3_OBJECTS = dndrv3.o -+dndrv4_OBJECTS = dndrv4.o -+dndrv5_OBJECTS = dndrv5.o -+dndrv6_OBJECTS = dndrv6.o -+sndrv1_OBJECTS = sndrv1.o -+sndrv2_OBJECTS = sndrv2.o -+sndrv3_OBJECTS = sndrv3.o -+sndrv4_OBJECTS = sndrv4.o -+sndrv5_OBJECTS = sndrv5.o -+sndrv6_OBJECTS = sndrv6.o -+ -+all : dndrv1 dndrv2 dndrv3 dndrv4 dndrv5 dndrv6 \ -+ sndrv1 sndrv2 sndrv3 sndrv4 sndrv5 sndrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dndrv1 : $(dndrv1_OBJECTS) -+ $(F77LD) -o $@ $(dndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv2 : $(dndrv2_OBJECTS) -+ $(F77LD) -o $@ $(dndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv3 : $(dndrv3_OBJECTS) -+ $(F77LD) -o $@ $(dndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv4 : $(dndrv4_OBJECTS) -+ $(F77LD) -o $@ $(dndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv5 : $(dndrv5_OBJECTS) -+ $(F77LD) -o $@ $(dndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv6 : $(dndrv6_OBJECTS) -+ $(F77LD) -o $@ $(dndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv1 : $(sndrv1_OBJECTS) -+ $(F77LD) -o $@ $(sndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv2 : $(sndrv2_OBJECTS) -+ $(F77LD) -o $@ $(sndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv3 : $(sndrv3_OBJECTS) -+ $(F77LD) -o $@ $(sndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv4 : $(sndrv4_OBJECTS) -+ $(F77LD) -o $@ $(sndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv5 : $(sndrv5_OBJECTS) -+ $(F77LD) -o $@ $(sndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv6 : $(sndrv6_OBJECTS) -+ $(F77LD) -o $@ $(sndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/PARPACK/EXAMPLES/MPI/Makefile_parpack_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/PARPACK/EXAMPLES/MPI/Makefile_parpack_intel 2021-06-30 10:37:57.095101000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+PARPACKLIB = -L$(ARPACK_LIB) -lparpack -larpack -+ -+pcndrv1_OBJECTS = pcndrv1.o -+pdndrv1_OBJECTS = pdndrv1.o -+pdndrv3_OBJECTS = pdndrv3.o -+psndrv1_OBJECTS = psndrv1.o -+psndrv3_OBJECTS = psndrv3.o -+pzndrv1_OBJECTS = pzndrv1.o -+ -+all : pcndrv1 pdndrv1 pdndrv3 \ -+ psndrv1 psndrv3 pzndrv1 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+pcndrv1 : $(pcndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pcndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv1 : $(pdndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv3 : $(pdndrv3_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv1 : $(psndrv1_OBJECTS) -+ $(F77LD) -o $@ $(psndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv3 : $(psndrv3_OBJECTS) -+ $(F77LD) -o $@ $(psndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pzndrv1 : $(pzndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pzndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/SIMPLE/Makefile_simple_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/SIMPLE/Makefile_simple_intel 2021-06-30 10:36:04.541931000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnsimp_OBJECTS = cnsimp.o -+dnsimp_OBJECTS = dnsimp.o -+dssimp_OBJECTS = dssimp.o -+snsimp_OBJECTS = snsimp.o -+sssimp_OBJECTS = sssimp.o -+znsimp_OBJECTS = znsimp.o -+ -+all : cnsimp dnsimp dssimp \ -+ snsimp sssimp znsimp -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnsimp : $(cnsimp_OBJECTS) -+ $(F77LD) -o $@ $(cnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnsimp : $(dnsimp_OBJECTS) -+ $(F77LD) -o $@ $(dnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dssimp : $(dssimp_OBJECTS) -+ $(F77LD) -o $@ $(dssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snsimp : $(snsimp_OBJECTS) -+ $(F77LD) -o $@ $(snsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sssimp : $(sssimp_OBJECTS) -+ $(F77LD) -o $@ $(sssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znsimp : $(znsimp_OBJECTS) -+ $(F77LD) -o $@ $(znsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/SVD/Makefile_svd_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/SVD/Makefile_svd_intel 2021-06-30 10:36:04.546102000 +0200 -@@ -0,0 +1,36 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsvd_OBJECTS = dsvd.o -+ssvd_OBJECTS = ssvd.o -+ -+all : dsvd \ -+ ssvd -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+ -+dsvd : $(dsvd_OBJECTS) -+ $(F77LD) -o $@ $(dsvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssvd : $(ssvd_OBJECTS) -+ $(F77LD) -o $@ $(ssvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/SYM/Makefile_sym_intel 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/SYM/Makefile_sym_intel 2021-06-30 10:36:04.549670000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsdrv1_OBJECTS = dsdrv1.o -+dsdrv2_OBJECTS = dsdrv2.o -+dsdrv3_OBJECTS = dsdrv3.o -+dsdrv4_OBJECTS = dsdrv4.o -+dsdrv5_OBJECTS = dsdrv5.o -+dsdrv6_OBJECTS = dsdrv6.o -+ssdrv1_OBJECTS = ssdrv1.o -+ssdrv2_OBJECTS = ssdrv2.o -+ssdrv3_OBJECTS = ssdrv3.o -+ssdrv4_OBJECTS = ssdrv4.o -+ssdrv5_OBJECTS = ssdrv5.o -+ssdrv6_OBJECTS = ssdrv6.o -+ -+all : dsdrv1 dsdrv2 dsdrv3 dsdrv4 dsdrv5 dsdrv6 \ -+ ssdrv1 ssdrv2 ssdrv3 ssdrv4 ssdrv5 ssdrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dsdrv1 : $(dsdrv1_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv2 : $(dsdrv2_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv3 : $(dsdrv3_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv4 : $(dsdrv4_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv5 : $(dsdrv5_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv6 : $(dsdrv6_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv1 : $(ssdrv1_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv2 : $(ssdrv2_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv3 : $(ssdrv3_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv4 : $(ssdrv4_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv5 : $(ssdrv5_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv6 : $(ssdrv6_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-install-arpack-examples_gpsmkl.patch b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-install-arpack-examples_gpsmkl.patch deleted file mode 100644 index cb08b9ea9053080a2e005fe6cd8b7e318625133b..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-install-arpack-examples_gpsmkl.patch +++ /dev/null @@ -1,538 +0,0 @@ ---- arpack-ng-3.8.0/PARPACK_CHANGES 2021-06-30 09:57:39.220219000 +0200 -+++ arpack-ng-3.8.0-ok/PARPACK_CHANGES 2021-06-30 10:25:30.148960874 +0200 -@@ -334,4 +334,6 @@ - 36. 09/18/2016 p*apps.f and p*aitr.f contain condition to fetch machine epsilon. 
- When different p*aupd call use communicator with different number of CPU these - conditions cause deadlock. Variables inside conditions are moved to global -- to be reset each first iteration -\ No newline at end of file -+ to be reset each first iteration -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/BAND/Makefile_band 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/BAND/Makefile_band 2021-06-30 10:36:04.461587000 +0200 -@@ -0,0 +1,159 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnbdr1_OBJECTS = cnbdr1.o cnband.o -+cnbdr2_OBJECTS = cnbdr2.o cnband.o -+cnbdr3_OBJECTS = cnbdr3.o cnband.o -+cnbdr4_OBJECTS = cnbdr4.o cnband.o -+dnbdr1_OBJECTS = dnbdr1.o dnband.o -+dnbdr2_OBJECTS = dnbdr2.o dnband.o -+dnbdr3_OBJECTS = dnbdr3.o dnband.o -+dnbdr4_OBJECTS = dnbdr4.o dnband.o -+dnbdr5_OBJECTS = dnbdr5.o dnband.o -+dnbdr6_OBJECTS = dnbdr6.o dnband.o -+dsbdr1_OBJECTS = dsbdr1.o dsband.o -+dsbdr2_OBJECTS = dsbdr2.o dsband.o -+dsbdr3_OBJECTS = dsbdr3.o dsband.o -+dsbdr4_OBJECTS = dsbdr4.o dsband.o -+dsbdr5_OBJECTS = dsbdr5.o dsband.o -+dsbdr6_OBJECTS = dsbdr6.o dsband.o -+snbdr1_OBJECTS = snbdr1.o snband.o -+snbdr2_OBJECTS = snbdr2.o snband.o -+snbdr3_OBJECTS = snbdr3.o snband.o -+snbdr4_OBJECTS = snbdr4.o snband.o -+snbdr5_OBJECTS = snbdr5.o snband.o -+snbdr6_OBJECTS = snbdr6.o snband.o -+ssbdr1_OBJECTS = ssbdr1.o ssband.o -+ssbdr2_OBJECTS = ssbdr2.o ssband.o -+ssbdr3_OBJECTS = ssbdr3.o ssband.o -+ssbdr4_OBJECTS = ssbdr4.o ssband.o -+ssbdr5_OBJECTS = ssbdr5.o ssband.o -+ssbdr6_OBJECTS = ssbdr6.o ssband.o -+znbdr1_OBJECTS = znbdr1.o znband.o -+znbdr2_OBJECTS = znbdr2.o znband.o -+znbdr3_OBJECTS = znbdr3.o znband.o -+znbdr4_OBJECTS = znbdr4.o znband.o -+ -+all : cnbdr1 cnbdr2 cnbdr3 cnbdr4 \ -+ dnbdr1 dnbdr2 dnbdr3 dnbdr4 dnbdr5 dnbdr6 \ -+ dsbdr1 dsbdr2 dsbdr3 dsbdr4 dsbdr5 dsbdr6 \ -+ snbdr1 snbdr2 snbdr3 snbdr4 snbdr5 snbdr6 \ -+ ssbdr1 ssbdr2 ssbdr3 ssbdr4 ssbdr5 ssbdr6 \ -+ znbdr1 znbdr2 znbdr3 znbdr4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnbdr1 : $(cnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr2 : $(cnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr3 : $(cnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cnbdr4 : $(cnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(cnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr1 : $(dnbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr2 : $(dnbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr3 : $(dnbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr4 : $(dnbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr5 : $(dnbdr5_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnbdr6 : $(dnbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dnbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr1 : $(dsbdr1_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr2 : $(dsbdr2_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr2_OBJECTS) $(ARPACKLIB) 
$(LAPACK_LIBS) -+ -+dsbdr3 : $(dsbdr3_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr4 : $(dsbdr4_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr5 : $(dsbdr5_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsbdr6 : $(dsbdr6_OBJECTS) -+ $(F77LD) -o $@ $(dsbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr1 : $(snbdr1_OBJECTS) -+ $(F77LD) -o $@ $(snbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr2 : $(snbdr2_OBJECTS) -+ $(F77LD) -o $@ $(snbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr3 : $(snbdr3_OBJECTS) -+ $(F77LD) -o $@ $(snbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr4 : $(snbdr4_OBJECTS) -+ $(F77LD) -o $@ $(snbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr5 : $(snbdr5_OBJECTS) -+ $(F77LD) -o $@ $(snbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snbdr6 : $(snbdr6_OBJECTS) -+ $(F77LD) -o $@ $(snbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr1 : $(ssbdr1_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr2 : $(ssbdr2_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr3 : $(ssbdr3_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr4 : $(ssbdr4_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr5 : $(ssbdr5_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssbdr6 : $(ssbdr6_OBJECTS) -+ $(F77LD) -o $@ $(ssbdr6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr1 : $(znbdr1_OBJECTS) -+ $(F77LD) -o $@ $(znbdr1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr2 : $(znbdr2_OBJECTS) -+ $(F77LD) -o $@ $(znbdr2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr3 : $(znbdr3_OBJECTS) -+ $(F77LD) -o $@ $(znbdr3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znbdr4 : $(znbdr4_OBJECTS) -+ $(F77LD) -o $@ $(znbdr4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/COMPLEX/Makefile_complex 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/COMPLEX/Makefile_complex 2021-06-30 10:36:04.494505000 +0200 -@@ -0,0 +1,59 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cndrv1_OBJECTS = cndrv1.o -+cndrv2_OBJECTS = cndrv2.o -+cndrv3_OBJECTS = cndrv3.o -+cndrv4_OBJECTS = cndrv4.o -+zndrv1_OBJECTS = zndrv1.o -+zndrv2_OBJECTS = zndrv2.o -+zndrv3_OBJECTS = zndrv3.o -+zndrv4_OBJECTS = zndrv4.o -+ -+all : cndrv1 cndrv2 cndrv3 cndrv4 \ -+ zndrv1 zndrv2 zndrv3 zndrv4 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cndrv1 : $(cndrv1_OBJECTS) -+ $(F77LD) -o $@ $(cndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv2 : $(cndrv2_OBJECTS) -+ $(F77LD) -o $@ $(cndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv3 : $(cndrv3_OBJECTS) -+ $(F77LD) -o $@ $(cndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+cndrv4 : $(cndrv4_OBJECTS) -+ $(F77LD) -o $@ $(cndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv1 : $(zndrv1_OBJECTS) -+ $(F77LD) -o $@ $(zndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv2 : $(zndrv2_OBJECTS) -+ $(F77LD) -o $@ $(zndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv3 : $(zndrv3_OBJECTS) -+ $(F77LD) -o $@ $(zndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+zndrv4 : $(zndrv4_OBJECTS) -+ $(F77LD) -o $@ $(zndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/NONSYM/Makefile_nonsym 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/NONSYM/Makefile_nonsym 2021-06-30 10:36:04.498741000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dndrv1_OBJECTS = dndrv1.o -+dndrv2_OBJECTS = dndrv2.o -+dndrv3_OBJECTS = dndrv3.o -+dndrv4_OBJECTS = dndrv4.o -+dndrv5_OBJECTS = dndrv5.o -+dndrv6_OBJECTS = dndrv6.o -+sndrv1_OBJECTS = sndrv1.o -+sndrv2_OBJECTS = sndrv2.o -+sndrv3_OBJECTS = sndrv3.o -+sndrv4_OBJECTS = sndrv4.o -+sndrv5_OBJECTS = sndrv5.o -+sndrv6_OBJECTS = sndrv6.o -+ -+all : dndrv1 dndrv2 dndrv3 dndrv4 dndrv5 dndrv6 \ -+ sndrv1 sndrv2 sndrv3 sndrv4 sndrv5 sndrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dndrv1 : $(dndrv1_OBJECTS) -+ $(F77LD) -o $@ $(dndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv2 : $(dndrv2_OBJECTS) -+ $(F77LD) -o $@ $(dndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv3 : $(dndrv3_OBJECTS) -+ $(F77LD) -o $@ $(dndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv4 : $(dndrv4_OBJECTS) -+ $(F77LD) -o $@ $(dndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv5 : $(dndrv5_OBJECTS) -+ $(F77LD) -o $@ $(dndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dndrv6 : $(dndrv6_OBJECTS) -+ $(F77LD) -o $@ $(dndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv1 : $(sndrv1_OBJECTS) -+ $(F77LD) -o $@ $(sndrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv2 : $(sndrv2_OBJECTS) -+ $(F77LD) -o $@ $(sndrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv3 : $(sndrv3_OBJECTS) -+ $(F77LD) -o $@ $(sndrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv4 : $(sndrv4_OBJECTS) -+ $(F77LD) -o $@ $(sndrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv5 : $(sndrv5_OBJECTS) -+ $(F77LD) -o $@ $(sndrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sndrv6 : $(sndrv6_OBJECTS) -+ $(F77LD) -o $@ $(sndrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/PARPACK/EXAMPLES/MPI/Makefile_parpack 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/PARPACK/EXAMPLES/MPI/Makefile_parpack 2021-06-30 10:37:57.093198000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+PARPACKLIB = -L$(ARPACK_LIB) -lparpack -larpack -+ -+pcndrv1_OBJECTS = pcndrv1.o -+pdndrv1_OBJECTS = pdndrv1.o -+pdndrv3_OBJECTS = pdndrv3.o -+psndrv1_OBJECTS = psndrv1.o -+psndrv3_OBJECTS = psndrv3.o -+pzndrv1_OBJECTS = pzndrv1.o -+ -+all : pcndrv1 pdndrv1 pdndrv3 \ -+ psndrv1 psndrv3 pzndrv1 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+pcndrv1 : $(pcndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pcndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv1 : $(pdndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pdndrv3 : $(pdndrv3_OBJECTS) -+ $(F77LD) -o $@ $(pdndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv1 : $(psndrv1_OBJECTS) -+ $(F77LD) -o $@ $(psndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+psndrv3 : $(psndrv3_OBJECTS) -+ $(F77LD) -o $@ $(psndrv3_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+pzndrv1 : $(pzndrv1_OBJECTS) -+ $(F77LD) -o $@ $(pzndrv1_OBJECTS) $(PARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/SIMPLE/Makefile_simple 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/SIMPLE/Makefile_simple 2021-06-30 10:36:04.541054000 +0200 -@@ -0,0 +1,51 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+cnsimp_OBJECTS = cnsimp.o -+dnsimp_OBJECTS = dnsimp.o -+dssimp_OBJECTS = dssimp.o -+snsimp_OBJECTS = snsimp.o -+sssimp_OBJECTS = sssimp.o -+znsimp_OBJECTS = znsimp.o -+ -+all : cnsimp dnsimp dssimp \ -+ snsimp sssimp znsimp -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+cnsimp : $(cnsimp_OBJECTS) -+ $(F77LD) -o $@ $(cnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dnsimp : $(dnsimp_OBJECTS) -+ $(F77LD) -o $@ $(dnsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dssimp : $(dssimp_OBJECTS) -+ $(F77LD) -o $@ $(dssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+snsimp : $(snsimp_OBJECTS) -+ $(F77LD) -o $@ $(snsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+sssimp : $(sssimp_OBJECTS) -+ $(F77LD) -o $@ $(sssimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+znsimp : $(znsimp_OBJECTS) -+ $(F77LD) -o $@ $(znsimp_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/SVD/Makefile_svd 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/SVD/Makefile_svd 2021-06-30 10:36:04.545031000 +0200 -@@ -0,0 +1,36 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. 
-+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsvd_OBJECTS = dsvd.o -+ssvd_OBJECTS = ssvd.o -+ -+all : dsvd \ -+ ssvd -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+ -+dsvd : $(dsvd_OBJECTS) -+ $(F77LD) -o $@ $(dsvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssvd : $(ssvd_OBJECTS) -+ $(F77LD) -o $@ $(ssvd_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ ---- arpack-ng-3.8.0/EXAMPLES/SYM/Makefile_sym 1970-01-01 01:00:00.000000000 +0100 -+++ arpack-ng-3.8.0-ok/EXAMPLES/SYM/Makefile_sym 2021-06-30 10:36:04.548174000 +0200 -@@ -0,0 +1,75 @@ -+# This Makefile.in is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+# -+CC = mpicc -+CFLAGS = -g -O2 -+CPP = mpicc -E -+CPPFLAGS = -+ -+F77 = mpif77 -+FFLAGS = -g -O2 -+F77LD = $(F77) -+FGREP = /bin/grep -F -+ -+LAPACK_LIBS = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -+ -+ARPACKLIB = -L$(ARPACK_LIB) -larpack -+ -+dsdrv1_OBJECTS = dsdrv1.o -+dsdrv2_OBJECTS = dsdrv2.o -+dsdrv3_OBJECTS = dsdrv3.o -+dsdrv4_OBJECTS = dsdrv4.o -+dsdrv5_OBJECTS = dsdrv5.o -+dsdrv6_OBJECTS = dsdrv6.o -+ssdrv1_OBJECTS = ssdrv1.o -+ssdrv2_OBJECTS = ssdrv2.o -+ssdrv3_OBJECTS = ssdrv3.o -+ssdrv4_OBJECTS = ssdrv4.o -+ssdrv5_OBJECTS = ssdrv5.o -+ssdrv6_OBJECTS = ssdrv6.o -+ -+all : dsdrv1 dsdrv2 dsdrv3 dsdrv4 dsdrv5 dsdrv6 \ -+ ssdrv1 ssdrv2 ssdrv3 ssdrv4 ssdrv5 ssdrv6 -+ -+.SUFFIXES: .f .o -+.f.o: -+ $(F77) -c -o $@ $< -+ -+dsdrv1 : $(dsdrv1_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv2 : $(dsdrv2_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv3 : $(dsdrv3_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv4 : $(dsdrv4_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv5 : $(dsdrv5_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+dsdrv6 : $(dsdrv6_OBJECTS) -+ $(F77LD) -o $@ $(dsdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv1 : $(ssdrv1_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv1_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv2 : $(ssdrv2_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv2_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv3 : $(ssdrv3_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv3_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv4 : $(ssdrv4_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv4_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv5 : $(ssdrv5_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv5_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ssdrv6 : $(ssdrv6_OBJECTS) -+ $(F77LD) -o $@ $(ssdrv6_OBJECTS) $(ARPACKLIB) $(LAPACK_LIBS) -+ -+ diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-intel-2021.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-intel-2021.eb deleted file mode 100644 index 90552dce2576d713d1afefd5e50aa04083259e93..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-intel-2021.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.8.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. 
- -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --enable-static --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-intel-para-2021.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-intel-para-2021.eb deleted file mode 100644 index 4c7b9fcb96dbf4a0658715352254c7a48bc73ccf..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-intel-para-2021.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.8.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --enable-static --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-iomkl-2021.eb b/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-iomkl-2021.eb deleted file mode 100644 index 3ed6e448ebce3c96af873c3342bbaada41c83850..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ARPACK-NG/ARPACK-NG-3.8.0-iomkl-2021.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ARPACK-NG' -version = '3.8.0' - -homepage = 'http://forge.scilab.org/index.php/p/arpack-ng/' -description = """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems. - -libarpack.a and libparpack.a have been installed in $EBROOTARPACKMINNG. - -In addition the variables ARPACK_ROOT, ARPACK_LIB, PARPACK_ROOT, and PARPACK_LIB are set. -""" - -examples = 'Examples can be found in $ARPACK_ROOT/EXAMPLES' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'opt': True, 'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/opencollab/arpack-ng/archive/'] -sources = ["%(version)s.tar.gz"] - -patches = [ - 'ARPACK-NG-%(version)s-install-arpack-examples.patch' -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2') -] - -preconfigopts = 'sh bootstrap &&' -configopts = '--enable-mpi --with-pic --enable-static --with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK"' - -postinstallcmds = [ - "cp -r EXAMPLES %(installdir)s/EXAMPLES", - "cp -r PARPACK/EXAMPLES/MPI %(installdir)s/EXAMPLES/PARPACK", -] - -sanity_check_paths = { - 'files': ["lib/libarpack.a", "lib/libarpack.%s" % SHLIB_EXT, "lib/libparpack.a", "lib/libparpack.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = { - 'ARPACK_ROOT': '%(installdir)s', - 'PARPACK_ROOT': '%(installdir)s', - 'ARPACK_LIB': '%(installdir)s/lib', - 'PARPACK_LIB': '%(installdir)s/lib' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-gompi-2020-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-gompi-2020-Python-3.8.5.eb deleted file mode 100644 index 0687af05829e7a8a764e7a0a0849b754beb8a7a2..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-gompi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-gompi-2021-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-gompi-2021-Python-3.8.5.eb deleted file mode 100644 index 60357ccf97a5eaf5d6ffac01426938d4dbb2dff2..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-gompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index e272fcf3a459df71df820117189e96a98b4c12b4..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-gpsmpi-2020-Python-3.8.5.eb 
+++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 6163a117d646b9d4246068f7da263228f91dabb4..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-iimpi-2020-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-iimpi-2020-Python-3.8.5.eb deleted file mode 100644 index a3f8147c3c08be4e6dde8b2d5b94295cca064ff2..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-iimpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-iimpi-2021-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-iimpi-2021-Python-3.8.5.eb deleted file mode 100644 index d5d68f1987f3bb1657d4eb821c30617d913a46df..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-iimpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation 
Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-iompi-2020-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-iompi-2020-Python-3.8.5.eb deleted file mode 100644 index 9566221f13e1ff360afc96ac208fe5efb94379ad..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-iompi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-iompi-2021-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-iompi-2021-Python-3.8.5.eb deleted file mode 100644 index 59c45f034b39c98e9b73a9ba7cadefcc8d85cdc1..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-iompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-ipsmpi-2020-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-ipsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index 4dcd5a468f40e93ed2539719757704eda05fe81c..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-ipsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - 
('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/ASE/ASE-3.19.2-ipsmpi-2021-Python-3.8.5.eb b/Golden_Repo/a/ASE/ASE-3.19.2-ipsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 7d5128c8f64dd53fe96c46bdb16e3dacadd7314c..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ASE/ASE-3.19.2-ipsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'ASE' -version = '3.19.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/ase/' -description = """ASE is a python package providing an open source Atomic Simulation Environment in the Python scripting -language.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), -] - -download_dep_fail = True - -sanity_check_paths = { - 'files': ['bin/ase-build', 'bin/ase-db', 'bin/ase-gui', 'bin/ase-info', 'bin/ase-run'], - 'dirs': [''], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/AT-SPI2-ATK/AT-SPI2-ATK-2.34.2-GCCcore-10.3.0.eb b/Golden_Repo/a/AT-SPI2-ATK/AT-SPI2-ATK-2.34.2-GCCcore-10.3.0.eb deleted file mode 100644 index f90e96dbb3e1d7d1d92e6851b73d5e3dec5fe5f9..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AT-SPI2-ATK/AT-SPI2-ATK-2.34.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'AT-SPI2-ATK' -version = '2.34.2' - -homepage = 'https://developer.gnome.org/ATK/stable/' -description = """ - ATK provides the set of accessibility interfaces that are implemented by other - toolkits and applications. Using the ATK interfaces, accessibility tools have - full access to view and control running applications. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('DBus', '1.12.20'), - ('ATK', '2.36.0'), - ('AT-SPI2-core', '2.36.0') -] - -modextrapaths = { - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib/libatk-bridge-2.0.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/a/AT-SPI2-ATK/AT-SPI2-ATK-2.34.2-GCCcore-9.3.0.eb b/Golden_Repo/a/AT-SPI2-ATK/AT-SPI2-ATK-2.34.2-GCCcore-9.3.0.eb deleted file mode 100644 index 2193b26aea05f1fbe2a5cd4db777bf5be1317178..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AT-SPI2-ATK/AT-SPI2-ATK-2.34.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'AT-SPI2-ATK' -version = '2.34.2' - -homepage = 'https://developer.gnome.org/ATK/stable/' -description = """ - ATK provides the set of accessibility interfaces that are implemented by other - toolkits and applications. Using the ATK interfaces, accessibility tools have - full access to view and control running applications. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('DBus', '1.12.20'), - ('ATK', '2.36.0'), - ('AT-SPI2-core', '2.36.0') -] - -modextrapaths = { - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib/libatk-bridge-2.0.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/a/AT-SPI2-core/AT-SPI2-core-2.36.0-GCCcore-10.3.0.eb b/Golden_Repo/a/AT-SPI2-core/AT-SPI2-core-2.36.0-GCCcore-10.3.0.eb deleted file mode 100644 index 6db137ca6380732bdcde8aafd7148b6727b15bfa..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AT-SPI2-core/AT-SPI2-core-2.36.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'AT-SPI2-core' -version = '2.36.0' - -homepage = 'https://developer.gnome.org/ATK/stable/' -description = """ - ATK provides the set of accessibility interfaces that are implemented by other - toolkits and applications. Using the ATK interfaces, accessibility tools have - full access to view and control running applications. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - - -builddependencies = [ - ('binutils', '2.36.1'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('DBus', '1.12.20'), - ('intltool', '0.51.0', '-Perl-5.32.0'), - ('X11', '20200222'), -] - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib64/libatspi.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/a/AT-SPI2-core/AT-SPI2-core-2.36.0-GCCcore-9.3.0.eb b/Golden_Repo/a/AT-SPI2-core/AT-SPI2-core-2.36.0-GCCcore-9.3.0.eb deleted file mode 100644 index 95aa1357897581535d5416ed2c69e2490d18b86e..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AT-SPI2-core/AT-SPI2-core-2.36.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'AT-SPI2-core' -version = '2.36.0' - -homepage = 'https://developer.gnome.org/ATK/stable/' -description = """ - ATK provides the set of accessibility interfaces that are implemented by other - toolkits and applications. Using the ATK interfaces, accessibility tools have - full access to view and control running applications. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - - -builddependencies = [ - ('binutils', '2.34'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('DBus', '1.12.20'), - ('intltool', '0.51.0', '-Perl-5.32.0'), - ('X11', '20200222'), -] - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib64/libatspi.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/a/ATK/ATK-2.36.0-GCCcore-10.3.0.eb b/Golden_Repo/a/ATK/ATK-2.36.0-GCCcore-10.3.0.eb deleted file mode 100644 index 5153cdbda136dcfa73a79c2c0b2654b0598d4fa4..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ATK/ATK-2.36.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'ATK' -version = '2.36.0' - -homepage = 'https://developer.gnome.org/ATK/stable/' -description = """ - ATK provides the set of accessibility interfaces that are implemented by other - toolkits and applications. Using the ATK interfaces, accessibility tools have - full access to view and control running applications. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('pkg-config', '0.29.2'), - ('Ninja', '1.10.0') -] - -dependencies = [ - ('GLib', '2.64.4'), -] - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib64/libatk-1.0.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/a/ATK/ATK-2.36.0-GCCcore-9.3.0.eb b/Golden_Repo/a/ATK/ATK-2.36.0-GCCcore-9.3.0.eb deleted file mode 100644 index 4147aff0258662cbdf25dc7b5bd837842b1be64d..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ATK/ATK-2.36.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'ATK' -version = '2.36.0' - -homepage = 'https://developer.gnome.org/ATK/stable/' -description = """ - ATK provides the set of accessibility interfaces that are implemented by other - toolkits and applications. Using the ATK interfaces, accessibility tools have - full access to view and control running applications. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('pkg-config', '0.29.2'), - ('Ninja', '1.10.0') -] - -dependencies = [ - ('GLib', '2.64.4'), -] - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib64/libatk-1.0.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/a/Advisor/Advisor-2020_update3.eb b/Golden_Repo/a/Advisor/Advisor-2020_update3.eb deleted file mode 100644 index 3125a091a97eac23431fb0e0cb1a85266d13acde..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Advisor/Advisor-2020_update3.eb +++ /dev/null @@ -1,21 +0,0 @@ -name = 'Advisor' -version = '2020_update3' - -homepage = 'https://software.intel.com/intel-advisor-xe' -description = """Vectorization Optimization and Thread Prototyping - - Vectorize & thread code or performance "dies" - - Easy workflow + data + tips = faster code faster - - Prioritize, Prototype & Predict performance gain -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['advisor_%(version)s.tar.gz'] - -dontcreateinstalldir = True - -requires_runtime_license = False - -moduleclass = 'perf' diff --git a/Golden_Repo/a/AtomPAW/AtomPAW-4.1.0.5-gpsmkl-2020.eb b/Golden_Repo/a/AtomPAW/AtomPAW-4.1.0.5-gpsmkl-2020.eb deleted file mode 100644 index 2d08f82e2e11ff033bc3f7828c7620fc2168828a..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AtomPAW/AtomPAW-4.1.0.5-gpsmkl-2020.eb +++ /dev/null @@ -1,43 +0,0 @@ -## -# This version of AtomPAW is lastest version to be used with ABINIT 8.10.x -## - - -easyblock = 'ConfigureMake' - - -name = 'AtomPAW' -version = '4.1.0.5' - -homepage = 'http://users.wfu.edu/natalie/papers/pwpaw/man.html' -description = """ -AtomPAW is a Projector-Augmented Wave Dataset Generator that -can be used both as a standalone program and a library. 
-""" - -site_contacts = 'Sebastian Achilles (s.achilles@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://users.wfu.edu/natalie/papers/pwpaw/'] - -checksums = ['d71d4c0ac83638b6b50aa976d97197ca8ed45188a13372a1d141d810857a05c1'] - -dependencies = [ - ('libxc', '3.0.1'), -] - -configopts = '--enable-libxc' -configopts += ' --with-libxc-incs="-I$EBROOTLIBXC/include"' -configopts += ' --with-libxc-libs="-L$EBROOTLIBXC/lib -lxc"' - -configopts += ' --with-linalg-libs="-L$EBROOTIMKL/lib/intel64 -Wl,--start-group' -configopts += ' -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -Wl,--end-group -lpthread -lm -ldl" ' - -sanity_check_paths = { - 'files': ['bin/atompaw', 'bin/graphatom', 'lib/libatompaw.a'], - 'dirs': ['lib'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/AtomPAW/AtomPAW-4.1.0.5-intel-para-2020.eb b/Golden_Repo/a/AtomPAW/AtomPAW-4.1.0.5-intel-para-2020.eb deleted file mode 100644 index 2ed48d5838b502f7550f8a5b3153f945d2d4636b..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/AtomPAW/AtomPAW-4.1.0.5-intel-para-2020.eb +++ /dev/null @@ -1,43 +0,0 @@ -## -# This version of AtomPAW is lastest version to be used with ABINIT 8.10.x -## - - -easyblock = 'ConfigureMake' - - -name = 'AtomPAW' -version = '4.1.0.5' - -homepage = 'http://users.wfu.edu/natalie/papers/pwpaw/man.html' -description = """ -AtomPAW is a Projector-Augmented Wave Dataset Generator that -can be used both as a standalone program and a library. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://users.wfu.edu/natalie/papers/pwpaw/'] - -checksums = ['d71d4c0ac83638b6b50aa976d97197ca8ed45188a13372a1d141d810857a05c1'] - -dependencies = [ - ('libxc', '3.0.1'), -] - -configopts = '--enable-libxc' -configopts += ' --with-libxc-incs="-I$EBROOTLIBXC/include"' -configopts += ' --with-libxc-libs="-L$EBROOTLIBXC/lib -lxc"' - -configopts += ' --with-linalg-libs="-L$EBROOTIMKL/lib/intel64 -Wl,--start-group' -configopts += ' -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -Wl,--end-group -lpthread -lm -ldl" ' - -sanity_check_paths = { - 'files': ['bin/atompaw', 'bin/graphatom', 'lib/libatompaw.a'], - 'dirs': ['lib'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/a/Autoconf/Autoconf-2.69-GCCcore-10.3.0.eb b/Golden_Repo/a/Autoconf/Autoconf-2.69-GCCcore-10.3.0.eb deleted file mode 100644 index 385f9ad4d0b611e0515a70978464ccb8b08e74f9..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Autoconf/Autoconf-2.69-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Autoconf' -version = '2.69' - -homepage = 'http://www.gnu.org/software/autoconf/' -description = """Autoconf is an extensible package of M4 macros that produce shell scripts - to automatically configure software source code packages. These scripts can adapt the - packages to many kinds of UNIX-like systems without manual user intervention. Autoconf - creates a configuration script for a package from a template file that lists the - operating system features that the package can use, in the form of M4 macro calls. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('M4', '1.4.18'), - ('Perl', '5.32.0'), -] - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["autoconf", "autoheader", "autom4te", "autoreconf", "autoscan", - "autoupdate", "ifnames"]], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Autoconf/Autoconf-2.69-GCCcore-9.3.0.eb b/Golden_Repo/a/Autoconf/Autoconf-2.69-GCCcore-9.3.0.eb deleted file mode 100644 index 7dcabac85c6d9ccdaa9ac7140e4a60d38480ba3b..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Autoconf/Autoconf-2.69-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Autoconf' -version = '2.69' - -homepage = 'http://www.gnu.org/software/autoconf/' -description = """Autoconf is an extensible package of M4 macros that produce shell scripts - to automatically configure software source code packages. These scripts can adapt the - packages to many kinds of UNIX-like systems without manual user intervention. Autoconf - creates a configuration script for a package from a template file that lists the - operating system features that the package can use, in the form of M4 macro calls. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["autoconf", "autoheader", "autom4te", "autoreconf", "autoscan", - "autoupdate", "ifnames"]], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Autoconf/Autoconf-2.69.eb b/Golden_Repo/a/Autoconf/Autoconf-2.69.eb deleted file mode 100644 index 1a421adce104a234cf5b1cca90f97c92d5584417..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Autoconf/Autoconf-2.69.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Autoconf' -version = '2.69' - -homepage = 'http://www.gnu.org/software/autoconf/' -description = """Autoconf is an extensible package of M4 macros that produce shell scripts - to automatically configure software source code packages. These scripts can adapt the - packages to many kinds of UNIX-like systems without manual user intervention. Autoconf - creates a configuration script for a package from a template file that lists the - operating system features that the package can use, in the form of M4 macro calls. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["autoconf", "autoheader", "autom4te", "autoreconf", "autoscan", - "autoupdate", "ifnames"]], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Automake/Automake-1.16.2-GCCcore-10.3.0.eb b/Golden_Repo/a/Automake/Automake-1.16.2-GCCcore-10.3.0.eb deleted file mode 100644 index 31d12dd1817fdf62a635d66f730fd34defc262a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Automake/Automake-1.16.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/ -## - -easyblock = 'ConfigureMake' - -name = 'Automake' -version = "1.16.2" - -homepage = 'http://www.gnu.org/software/automake/automake.html' -description = """ -Automake: GNU Standards-compliant Makefile generator -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [('binutils', '2.36.1')] -dependencies = [ - ('Autoconf', '2.69'), - ('Perl', '5.32.0'), -] - -sanity_check_paths = { - 'files': ['bin/automake', 'bin/aclocal'], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Automake/Automake-1.16.2-GCCcore-9.3.0.eb b/Golden_Repo/a/Automake/Automake-1.16.2-GCCcore-9.3.0.eb deleted file mode 100644 index 53d29c2eeebbd61bb2457e21b57ef501ba6b162f..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Automake/Automake-1.16.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/ -## - -easyblock = 'ConfigureMake' - -name = 'Automake' -version = "1.16.2" - -homepage = 'http://www.gnu.org/software/automake/automake.html' -description = """ -Automake: GNU Standards-compliant Makefile generator -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [('binutils', '2.34')] -dependencies = [('Autoconf', '2.69')] - -sanity_check_paths = { - 'files': ['bin/automake', 'bin/aclocal'], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Automake/Automake-1.16.2.eb b/Golden_Repo/a/Automake/Automake-1.16.2.eb deleted file mode 100644 index 59772d5e435bc5ffb097018f1e11b7ac17bd92d9..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Automake/Automake-1.16.2.eb +++ /dev/null @@ -1,38 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a 
component of the policy: -# http://hpcbios.readthedocs.org/en/latest/ -## - -easyblock = 'ConfigureMake' - -name = 'Automake' -version = "1.16.2" - -homepage = 'http://www.gnu.org/software/automake/automake.html' -description = """ -Automake: GNU Standards-compliant Makefile generator -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [('binutils', '2.34')] -dependencies = [('Autoconf', '2.69')] - -sanity_check_paths = { - 'files': ['bin/automake', 'bin/aclocal'], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Autotools/Autotools-20200321-GCCcore-10.3.0.eb b/Golden_Repo/a/Autotools/Autotools-20200321-GCCcore-10.3.0.eb deleted file mode 100644 index 4a66f7cac68aa5c7e72694c45e2fc4f0effdbf83..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Autotools/Autotools-20200321-GCCcore-10.3.0.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'Bundle' - -name = 'Autotools' -version = '20200321' # date of the most recent change - -homepage = 'http://autotools.io' -description = """ - This bundle collect the standard GNU build tools: Autoconf, Automake and libtool -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -dependencies = [ - ('Autoconf', '2.69'), # 20120424 - ('Automake', '1.16.2'), # 20200321 - ('libtool', '2.4.6'), # 20150215 -] - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Autotools/Autotools-20200321-GCCcore-9.3.0.eb b/Golden_Repo/a/Autotools/Autotools-20200321-GCCcore-9.3.0.eb deleted file mode 100644 index ac6154883f92b8f2be58e10d8d3b7c86328e67d4..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Autotools/Autotools-20200321-GCCcore-9.3.0.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'Bundle' - -name = 'Autotools' -version = '20200321' # date of the most recent change - -homepage = 'http://autotools.io' -description = """ - This bundle collect the standard GNU build tools: Autoconf, Automake and libtool -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -dependencies = [ - ('Autoconf', '2.69'), # 20120424 - ('Automake', '1.16.2'), # 20200321 - ('libtool', '2.4.6'), # 20150215 -] - -moduleclass = 'devel' diff --git a/Golden_Repo/a/Autotools/Autotools-20200321.eb b/Golden_Repo/a/Autotools/Autotools-20200321.eb deleted file mode 100644 index d8d73804faae32e9009cdfbc258a053ae3bfa7dc..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/Autotools/Autotools-20200321.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'Bundle' - -name = 'Autotools' -version = '20200321' # date of the most recent change - -homepage = 'http://autotools.io' -description = """ - This bundle collect the standard GNU build tools: Autoconf, Automake and libtool -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -dependencies = [ - ('Autoconf', '2.69'), # 20120424 - ('Automake', '1.16.2'), # 20200321 - ('libtool', '2.4.6'), # 20150215 -] - -moduleclass = 'devel' diff --git a/Golden_Repo/a/ant/ant-1.10.9-Java-15.eb b/Golden_Repo/a/ant/ant-1.10.9-Java-15.eb deleted file mode 100644 index 299ccd4b4fc08db690844212a670ba2a73e422e1..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/ant/ant-1.10.9-Java-15.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'PackedBinary' - -name = 'ant' -version = '1.10.9' -versionsuffix = '-Java-%(javaver)s' - -homepage = 'https://ant.apache.org/' -description = """Apache Ant is a Java library and command-line tool 
whose mission is to drive processes described in - build files as targets and extension points dependent upon each other. The main known usage of Ant is the build of - Java applications.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://archive.apache.org/dist/ant/binaries/'] -sources = ['apache-%(name)s-%(version)s-bin.tar.gz'] -checksums = ['5f8a85ddee6effe79163aa54c7bef6b60305e37200dedc1986437fb5c072a9f3'] - -dependencies = [('Java', '15')] - -sanity_check_paths = { - 'files': ['bin/ant', 'lib/ant.jar'], - 'dirs': [], -} - -modextravars = {'ANT_HOME': '%(installdir)s'} - -moduleclass = 'devel' diff --git a/Golden_Repo/a/archspec/archspec-0.1.0-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/a/archspec/archspec-0.1.0-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 3ea5dff019b09945b1148cac22e85d003d03db5a..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/archspec/archspec-0.1.0-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'archspec' -version = '0.1.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/archspec/archspec' -description = "A library for detecting, labeling, and reasoning about microarchitectures" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_SOURCE] -sources = ['%(name)s-%(version)s-py2.py3-none-any.whl'] -checksums = ['12f2029f63ffbc560e43f7d1f366a45ff46c7bd0751653227f8015f83f121119'] - -builddependencies = [ - ('binutils', '2.36.1'), -] -dependencies = [ - ('Python', '3.8.5'), -] - -unpack_sources = False -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -sanity_check_commands = ["python -c 'from %(name)s.cpu import host; print(host())'"] - -moduleclass = 'tools' diff --git a/Golden_Repo/a/archspec/archspec-0.1.0-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/a/archspec/archspec-0.1.0-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index b5e1de9a1eb325d31e720e79fa0612895bc7daab..0000000000000000000000000000000000000000 --- a/Golden_Repo/a/archspec/archspec-0.1.0-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'archspec' -version = '0.1.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/archspec/archspec' -description = "A library for detecting, labeling, and reasoning about microarchitectures" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_SOURCE] -sources = ['%(name)s-%(version)s-py2.py3-none-any.whl'] -checksums = ['12f2029f63ffbc560e43f7d1f366a45ff46c7bd0751653227f8015f83f121119'] - -builddependencies = [ - ('binutils', '2.34'), -] -dependencies = [ - ('Python', '3.8.5'), -] - -unpack_sources = False -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -sanity_check_commands = ["python -c 'from %(name)s.cpu import host; print(host())'"] - -moduleclass = 'tools' diff --git a/Golden_Repo/b/Bazel/Bazel-3.4.1-fix-grpc-protoc.patch b/Golden_Repo/b/Bazel/Bazel-3.4.1-fix-grpc-protoc.patch deleted file mode 100644 index ecc4021a049801f6f37470d71e7e635a314bbb09..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bazel/Bazel-3.4.1-fix-grpc-protoc.patch +++ /dev/null @@ 
-1,27 +0,0 @@ -From cd3c41eb5a29ca475b7bafc42aa71e94363d46df Mon Sep 17 00:00:00 2001 -From: Alexander Grund <alexander.grund@tu-dresden.de> -Date: Tue, 28 Jul 2020 19:51:13 +0200 -Subject: [PATCH] Fix environment for protobuf compilation in grpc - -Add use_default_shell_env = True to protoc invocation for grpc to mirror -what the protobuf cc_proto_library & co are doing -Fixes a failure in invocing protoc when it is build in a non-default -environment (e.g. with a custom LD_LIBRARY_PATH) - -Fixes #11852, fixes #11855 ---- - third_party/grpc/bazel/generate_cc.bzl | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/third_party/grpc/bazel/generate_cc.bzl b/third_party/grpc/bazel/generate_cc.bzl -index 38a5b460f90..d5a4e27bc88 100644 ---- a/third_party/grpc/bazel/generate_cc.bzl -+++ b/third_party/grpc/bazel/generate_cc.bzl -@@ -123,6 +123,7 @@ def generate_cc_impl(ctx): - outputs = out_files, - executable = ctx.executable.protoc, - arguments = arguments, -+ use_default_shell_env = True, - ) - - return struct(files = depset(out_files)) diff --git a/Golden_Repo/b/Bazel/Bazel-3.6.0-GCCcore-10.3.0.eb b/Golden_Repo/b/Bazel/Bazel-3.6.0-GCCcore-10.3.0.eb deleted file mode 100644 index fbf29f23934f1ce12ab4b29a703a6dbaf22f0d8e..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bazel/Bazel-3.6.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'Bazel' -version = '3.6.0' - -homepage = 'http://bazel.io/' -description = """Bazel is a build tool that builds code quickly and reliably. -It is used to build the majority of Google's software.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'https://github.com/bazelbuild/bazel/releases/download/%(version)s'] -sources = ['%(namelower)s-%(version)s-dist.zip'] -patches = ['%(name)s-3.4.1-fix-grpc-protoc.patch', 'java15.patch'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Python', '3.8.5'), - ('Zip', '3.0'), -] - -prebuildopts = "export BAZEL_LINKOPTS=-static-libstdc++:-static-libgcc BAZEL_LINKLIBS=-l%:libstdc++.a:-lm && " - -dependencies = [('Java', '15', '', True)] - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Bazel/Bazel-3.6.0-GCCcore-9.3.0.eb b/Golden_Repo/b/Bazel/Bazel-3.6.0-GCCcore-9.3.0.eb deleted file mode 100644 index 3bcaab02204341421c1cefaf3da1985e5d3a1a61..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bazel/Bazel-3.6.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'Bazel' -version = '3.6.0' - -homepage = 'http://bazel.io/' -description = """Bazel is a build tool that builds code quickly and reliably. 
-It is used to build the majority of Google's software.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/bazelbuild/bazel/releases/download/%(version)s'] -sources = ['%(namelower)s-%(version)s-dist.zip'] -patches = ['%(name)s-3.4.1-fix-grpc-protoc.patch'] - -builddependencies = [ - ('binutils', '2.34'), - ('Python', '3.8.5'), - ('Zip', '3.0'), -] - -prebuildopts = "export BAZEL_LINKOPTS=-static-libstdc++:-static-libgcc BAZEL_LINKLIBS=-l%:libstdc++.a:-lm && " - -dependencies = [('Java', '1.8', '', True)] - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Bazel/java15.patch b/Golden_Repo/b/Bazel/java15.patch deleted file mode 100644 index 6fc6a95634e69a17caa9e08ba524079bedc13c6f..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bazel/java15.patch +++ /dev/null @@ -1,29 +0,0 @@ -From 0216ee54417fa1f2fef14f6eb14cbc1e8f595821 Mon Sep 17 00:00:00 2001 -From: philwo <philwo@google.com> -Date: Mon, 8 Feb 2021 10:45:50 -0800 -Subject: [PATCH] Fix Bazel #10214: JDK 13 introduced a source compatibility - issue. - -Quote from the Java release notes: - -The addition of newFileSystem(Path, Map<String, ?>) creates a source (but not binary) compatibility issue for code that has been using the existing 2-arg newFileSystem(Path, ClassLoader) and specifying the class loader as null. [...] To avoid the ambiguous reference, this code needs to be modified to cast the second parameter to java.lang.ClassLoader. - -RELNOTES: -PiperOrigin-RevId: 356301318 ---- - .../com/google/devtools/build/buildjar/VanillaJavaBuilder.java | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/VanillaJavaBuilder.java b/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/VanillaJavaBuilder.java -index 327017362626..5edf9ba0cf23 100644 ---- a/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/VanillaJavaBuilder.java -+++ b/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/VanillaJavaBuilder.java -@@ -77,7 +77,7 @@ - private FileSystem getJarFileSystem(Path sourceJar) throws IOException { - FileSystem fs = filesystems.get(sourceJar); - if (fs == null) { -- filesystems.put(sourceJar, fs = FileSystems.newFileSystem(sourceJar, null)); -+ filesystems.put(sourceJar, fs = FileSystems.newFileSystem(sourceJar, (ClassLoader) null)); - } - return fs; - } \ No newline at end of file diff --git a/Golden_Repo/b/Bison/Bison-3.6.4-GCCcore-9.3.0.eb b/Golden_Repo/b/Bison/Bison-3.6.4-GCCcore-9.3.0.eb deleted file mode 100644 index e8ab66921c7a166b379324301dee00254d5f64e8..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bison/Bison-3.6.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Bison' -version = '3.6.4' - -homepage = 'http://www.gnu.org/software/bison' -description = """Bison is a general-purpose parser generator that converts an annotated context-free grammar - into a deterministic LR or generalized LR (GLR) parser employing LALR(1) parser tables. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('M4', '1.4.18'), - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.34', '', SYSTEM), -] - - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["bison", "yacc"]] + ["lib/liby.a"], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/b/Bison/Bison-3.6.4.eb b/Golden_Repo/b/Bison/Bison-3.6.4.eb deleted file mode 100644 index 49e84cef476678b3a321fbc038818ed31c485dbf..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bison/Bison-3.6.4.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Bison' -version = '3.6.4' - -homepage = 'http://www.gnu.org/software/bison' -description = """Bison is a general-purpose parser generator that converts an annotated context-free grammar -into a deterministic LR or generalized LR (GLR) parser employing LALR(1) parser tables. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [('M4', '1.4.18')] - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["bison", "yacc"]] + ["lib/liby.a"], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/b/Bison/Bison-3.7.6-GCCcore-10.3.0.eb b/Golden_Repo/b/Bison/Bison-3.7.6-GCCcore-10.3.0.eb deleted file mode 100644 index 454f69dd8332c904214b1de89aea5357e64b94d4..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bison/Bison-3.7.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Bison' -version = '3.7.6' - -homepage = 'https://www.gnu.org/software/bison' -description = """Bison is a general-purpose parser generator that converts an annotated context-free grammar - into a deterministic LR or generalized LR (GLR) parser employing LALR(1) parser tables.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['69dc0bb46ea8fc307d4ca1e0b61c8c355eb207d0b0c69f4f8462328e74d7b9ea'] - -builddependencies = [ - ('M4', '1.4.18'), - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.36.1', '', True), -] - - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['bison', 'yacc']] + [('lib/liby.a', 'lib64/liby.a')], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/b/Bison/Bison-3.7.6.eb b/Golden_Repo/b/Bison/Bison-3.7.6.eb deleted file mode 100644 index 405d40e71d2e999d30030bb14e03529ba46f9831..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Bison/Bison-3.7.6.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Bison' -version = '3.7.6' - -homepage = 'https://www.gnu.org/software/bison' - -description = """ - Bison is a general-purpose parser generator that converts an annotated - context-free grammar into a deterministic LR or generalized LR (GLR) parser - employing LALR(1) parser tables. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['69dc0bb46ea8fc307d4ca1e0b61c8c355eb207d0b0c69f4f8462328e74d7b9ea'] - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['bison', 'yacc']] + [('lib/liby.a', 'lib64/liby.a')], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/b/Blender/Blender-2.90.1-binary.eb b/Golden_Repo/b/Blender/Blender-2.90.1-binary.eb deleted file mode 100644 index e134af2c7c055f39e1631f91b655e70f5460b5fc..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Blender/Blender-2.90.1-binary.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'PackedBinary' - -name = 'Blender' -version = '2.90.1' -versionsuffix = '-binary' - -homepage = 'https://www.blender.org' -description = """ -Blender is the free and open source 3D creation suite. It supports the entirety of the 3D pipeline, -modeling, rigging, animation, simulation, rendering, compositing and motion tracking, even video -editing and game creation. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['http://ftp.nluug.nl/pub/graphics/%(namelower)s/release/%(name)s%(version_major_minor)s/'] -sources = ['%(namelower)s-%(version)s-linux64.tar.xz'] -checksums = ['054668c46a3e56921f283709f51a35f7860786183001cf2ea9be3249d13ac667'] - -modextrapaths = { - 'PATH': '', - 'LD_LIBRARY_PATH': 'lib', -} - -sanity_check_paths = { - 'files': ['%(namelower)s'], - 'dirs': ['%(version_major_minor)s'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/b/Blender/Blender-2.93.1-binary.eb b/Golden_Repo/b/Blender/Blender-2.93.1-binary.eb deleted file mode 100644 index 8de0a125974f6dfc0330f35e73b417f37485e221..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Blender/Blender-2.93.1-binary.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'PackedBinary' - -name = 'Blender' -version = '2.93.1' -versionsuffix = '-binary' - -homepage = 'https://www.blender.org' -description = """ -Blender is the free and open source 3D creation suite. It supports the entirety of the 3D pipeline, -modeling, rigging, animation, simulation, rendering, compositing and motion tracking, even video -editing and game creation. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['http://ftp.nluug.nl/pub/graphics/%(namelower)s/release/%(name)s%(version_major_minor)s/'] -sources = ['%(namelower)s-%(version)s-linux-x64.tar.xz'] -checksums = ['6159316089de79ce0c3edec8cfdc5f08a6ca79e6a1fa8642538bc850c43970fe'] - -modextrapaths = { - 'PATH': '', - 'LD_LIBRARY_PATH': 'lib', -} - -sanity_check_paths = { - 'files': ['%(namelower)s'], - 'dirs': ['%(version_major_minor)s'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/b/Boost.Python/Boost-1.71.0_fix-Python3.patch b/Golden_Repo/b/Boost.Python/Boost-1.71.0_fix-Python3.patch deleted file mode 100644 index 3e2cb94bc4bb1228f6f7b46658458efdd51f034a..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost-1.71.0_fix-Python3.patch +++ /dev/null @@ -1,14 +0,0 @@ -also consider Python include directory for Python 3.x -author: Damian Alvarez (Juelich Supercomputing Centre) -updated by: Kenneth Hoste (HPC-UGent), Alex Domingo (VUB) ---- boost_1_71_0/tools/build/src/tools/python.jam.old 2019-12-19 15:11:46.626395000 +0100 -+++ boost_1_71_0/tools/build/src/tools/python.jam 2019-12-19 15:12:31.441437885 +0100 -@@ -544,7 +544,7 @@ - } - else - { -- includes ?= $(prefix)/include/python$(version) ; -+ includes ?= $(prefix)/include/python$(version) $(prefix)/include/python$(version)m ; - - local lib = $(exec-prefix)/lib ; - libraries ?= $(lib)/python$(version)/config $(lib) ; diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iimpi-2020.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iimpi-2020.eb deleted file mode 100644 index cc6b647f7feb1590a91efcc2a19e8da4bb95cbab..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iimpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.73.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iimpi-2021.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iimpi-2021.eb deleted file mode 100644 index ec35d5b03c155c6c3e9623204ee413623cb76c5b..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iimpi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.73.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - 
-moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iompi-2020.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iompi-2020.eb deleted file mode 100644 index 63d5a7d383b493c2293cf0273faefdcd8063c66e..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iompi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.73.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iompi-2021.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iompi-2021.eb deleted file mode 100644 index cb72098a030ff633da87c98d4d90952bae3f2f39..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-iompi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.73.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2020-mt.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2020-mt.eb deleted file mode 100644 index d76a6db32db076fc0ddb880763dba4e409e2d9e3..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.73.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2020.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2020.eb deleted file mode 100644 index bd5b1b2dc356700296d2108388e3064328bfcc3e..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ 
-easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.73.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2021.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2021.eb deleted file mode 100644 index 1a0d5019049d618c82f861195854934e8d043d36..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.73.0-ipsmpi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.73.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-GCCcore-10.3.0-nompi.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-GCCcore-10.3.0-nompi.eb deleted file mode 100644 index f6fc100b695ed14bc875c7fe5eeb093a1c43e176..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-GCCcore-10.3.0-nompi.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.74.0' -versionsuffix = '-nompi' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version, versionsuffix), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-GCCcore-9.3.0-nompi.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-GCCcore-9.3.0-nompi.eb deleted file mode 100644 index f87b2c9d2ea7efa4f2b89af5c3fa0819a746197f..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-GCCcore-9.3.0-nompi.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.74.0' -versionsuffix = '-nompi' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming 
language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version, versionsuffix), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gompi-2020.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gompi-2020.eb deleted file mode 100644 index bbc2c03de4b721ae59f3f79c53130a93163ede9c..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gompi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.74.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gompi-2021.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gompi-2021.eb deleted file mode 100644 index 6d5a9a13e8c2ee5fa7e5b38af8760c6098b4ae37..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gompi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.74.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2020.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2020.eb deleted file mode 100644 index 45dfc0c2c783f2032d5d5595d84983cd6e8aecfd..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.74.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - 
('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2021.eb b/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2021.eb deleted file mode 100644 index 93460991866b5a3cdd518ca39022cceb9b9691f7..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.74.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Golden_Repo/b/Boost/Boost-1.73.0-iimpi-2020.eb b/Golden_Repo/b/Boost/Boost-1.73.0-iimpi-2020.eb deleted file mode 100644 index 88dedde951882d76a2960063cfb4b8b7e13fb3c5..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.73.0-iimpi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.73.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.73.0-iimpi-2021.eb b/Golden_Repo/b/Boost/Boost-1.73.0-iimpi-2021.eb deleted file mode 100644 index 5e2c199ba94a65932f64b2786669e0ac7a1e73a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.73.0-iimpi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.73.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.73.0-iompi-2020.eb b/Golden_Repo/b/Boost/Boost-1.73.0-iompi-2020.eb deleted file mode 100644 index fee74bf0cf637a6423c5cc770c934b76d340ebeb..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.73.0-iompi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.73.0' - -homepage = 'https://www.boost.org/' 
-description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.73.0-iompi-2021.eb b/Golden_Repo/b/Boost/Boost-1.73.0-iompi-2021.eb deleted file mode 100644 index 5c11f9ececa183d83ca87f4ddd9e7f67d0af4ab0..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.73.0-iompi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.73.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2020-mt.eb b/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2020-mt.eb deleted file mode 100644 index fa199b3f892d8eee5a77e5e4cb0139c2a23e849e..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.73.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2020.eb b/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2020.eb deleted file mode 100644 index fe8ad90a0ee4b265308c350604bbdfd23f880da8..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.73.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - 
-configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2021.eb b/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2021.eb deleted file mode 100644 index 26bfc768bdd3c1843950b3b4f29943319fc78445..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.73.0-ipsmpi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.73.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.74.0-GCCcore-10.3.0-nompi.eb b/Golden_Repo/b/Boost/Boost-1.74.0-GCCcore-10.3.0-nompi.eb deleted file mode 100644 index 4bf103fb3a5b720babdf7cbcf6568e83aa791053..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.74.0-GCCcore-10.3.0-nompi.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'Boost' -version = '1.74.0' -versionsuffix = '-nompi' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# do not build boost_mpi -boost_mpi = False - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.74.0-GCCcore-9.3.0-nompi.eb b/Golden_Repo/b/Boost/Boost-1.74.0-GCCcore-9.3.0-nompi.eb deleted file mode 100644 index 493fb7c847023c65f299e62cd3461b2f441bbfa2..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.74.0-GCCcore-9.3.0-nompi.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'Boost' -version = '1.74.0' -versionsuffix = '-nompi' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# do not build boost_mpi -boost_mpi = False - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.74.0-gompi-2020.eb b/Golden_Repo/b/Boost/Boost-1.74.0-gompi-2020.eb deleted file mode 100644 index 
bfaab66db11d51d67edf0f5ac7b2b34292be1101..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.74.0-gompi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.74.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.74.0-gompi-2021.eb b/Golden_Repo/b/Boost/Boost-1.74.0-gompi-2021.eb deleted file mode 100644 index deadb24c458e19c217f056f289fd6a05441e8eb9..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.74.0-gompi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.74.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.74.0-gpsmpi-2020.eb b/Golden_Repo/b/Boost/Boost-1.74.0-gpsmpi-2020.eb deleted file mode 100644 index 96f4eebda8eaa62b5159531605c7ee9640971047..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.74.0-gpsmpi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.74.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/Boost/Boost-1.74.0-gpsmpi-2021.eb b/Golden_Repo/b/Boost/Boost-1.74.0-gpsmpi-2021.eb deleted file mode 100644 index 7552b1685a750c4802676c15b5d03ad49f615ee0..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/Boost/Boost-1.74.0-gpsmpi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'Boost' -version = '1.74.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 
'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Golden_Repo/b/basemap/basemap-1.2.2-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/b/basemap/basemap-1.2.2-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index 402c10054c4cc16e0f2dea6d9e5d5390cd719371..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/basemap/basemap-1.2.2-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'Bundle' - -name = 'basemap' -version = '1.2.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://matplotlib.org/basemap/' -description = """Basemap is a Python matplotlib toolkit for plotting data on maps. - -This is the last version of Basemap. Please move development efforts over to Cartopy! -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), - ('GEOS', '3.8.1', versionsuffix), - ('PROJ', '7.1.0'), -] - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('pyshp', '2.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pyshp'], - 'modulename': 'shapefile', - }), - ('pyproj', '2.6.1.post1', { # PyProj 3 needs PROJ 7.2.0, which was released after PROJ went into production - 'source_urls': ['https://pypi.python.org/packages/source/p/pyproj'], - 'prebuildopts': 'PROJ_DIR="$EBROOTPROJ" ', - 'preinstallopts': 'PROJ_DIR="$EBROOTPROJ" ', - }), - ('basemap', '1.2.2', { - 'source_urls': ['https://github.com/matplotlib/basemap/archive/'], - 'source_tmpl': 'v%(version)srel.tar.gz', - 'prebuildopts': 'GEOS_DIR="$EBROOTGEOS" ', - 'preinstallopts': 'GEOS_DIR="$EBROOTGEOS" ', - 'modulename': 'mpl_toolkits.basemap', - }), -] - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'vis' diff --git a/Golden_Repo/b/basemap/basemap-1.2.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/b/basemap/basemap-1.2.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 6892be2a0c371d4eb9e0ac43ac9ab14a297a97f5..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/basemap/basemap-1.2.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'Bundle' - -name = 'basemap' -version = '1.2.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://matplotlib.org/basemap/' -description = """Basemap is a Python matplotlib toolkit for plotting data on maps. - -This is the last version of Basemap. Please move development efforts over to Cartopy! 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), - ('GEOS', '3.8.1', versionsuffix), - ('PROJ', '7.1.0'), -] - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('pyshp', '2.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pyshp'], - 'modulename': 'shapefile', - }), - ('pyproj', '2.6.1.post1', { # PyProj 3 needs PROJ 7.2.0, which was released after PROJ went into production - 'source_urls': ['https://pypi.python.org/packages/source/p/pyproj'], - 'prebuildopts': 'PROJ_DIR="$EBROOTPROJ" ', - 'preinstallopts': 'PROJ_DIR="$EBROOTPROJ" ', - }), - ('basemap', '1.2.2', { - 'source_urls': ['https://github.com/matplotlib/basemap/archive/'], - 'source_tmpl': 'v%(version)srel.tar.gz', - 'prebuildopts': 'GEOS_DIR="$EBROOTGEOS" ', - 'preinstallopts': 'GEOS_DIR="$EBROOTGEOS" ', - 'modulename': 'mpl_toolkits.basemap', - }), -] - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'vis' diff --git a/Golden_Repo/b/binutils/binutils-2.32.eb b/Golden_Repo/b/binutils/binutils-2.32.eb deleted file mode 100644 index 3c0954bb123de218f13f231112c6e2be33116257..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/binutils/binutils-2.32.eb +++ /dev/null @@ -1,32 +0,0 @@ -name = 'binutils' -version = '2.32' - -homepage = 'https://directory.fsf.org/project/binutils/' - -description = "binutils: GNU binary utilities" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = [ - 'binutils-2.31.1-gold-ignore-discarded-note-relocts.patch', - 'binutils-2.32-readd-avx512-vmovd.patch', - 'binutils-2.32_gold-include-cpp-headers.patch', -] - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - # zlib required, but being linked in statically, so not a runtime dep - ('zlib', '1.2.11'), -] - -# avoid build failure when makeinfo command is not available -# see https://sourceware.org/bugzilla/show_bug.cgi?id=15345 -buildopts = 'MAKEINFO=true' -installopts = buildopts - -moduleclass = 'tools' diff --git a/Golden_Repo/b/binutils/binutils-2.34-GCCcore-9.3.0.eb b/Golden_Repo/b/binutils/binutils-2.34-GCCcore-9.3.0.eb deleted file mode 100644 index 7fff61993903d1fc28733e827664fbbe327d1d1a..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/binutils/binutils-2.34-GCCcore-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'binutils' -version = '2.34' - -homepage = 'https://directory.fsf.org/project/binutils/' -description = "binutils: GNU binary utilities" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = [ - 'binutils-2.31.1-gold-ignore-discarded-note-relocts.patch', - 'binutils-%(version)s-readd-avx512-vmovd.patch', -] -checksums = [ - '53537d334820be13eeb8acb326d01c7c81418772d626715c7ae927a7d401cab3', # binutils-2.34.tar.gz - # binutils-2.31.1-gold-ignore-discarded-note-relocts.patch - '17f22cc9136d0e81cfe8cbe310328c794a78a864e7fe7ca5827ee6678f65af32', - '45ecf7f5d198dd446d1a2e2a4d46b2747eb6fb8f2bfa18d7d42769e710e85716', # binutils-2.34-readd-avx512-vmovd.patch -] - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - # use same binutils version that was used when building 
GCC toolchain, to 'bootstrap' this binutils - ('binutils', version, '', SYSTEM) -] - -dependencies = [ - # zlib is a runtime dep to avoid that it gets embedded in libbfd.so, - # see https://github.com/easybuilders/easybuild-easyblocks/issues/1350 - ('zlib', '1.2.11'), -] - -# avoid build failure when makeinfo command is not available -# see https://sourceware.org/bugzilla/show_bug.cgi?id=15345 -buildopts = 'MAKEINFO=true' -installopts = buildopts - -moduleclass = 'tools' diff --git a/Golden_Repo/b/binutils/binutils-2.34.eb b/Golden_Repo/b/binutils/binutils-2.34.eb deleted file mode 100644 index a723de986256760e5af1b4fd7d9bf0275856d295..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/binutils/binutils-2.34.eb +++ /dev/null @@ -1,37 +0,0 @@ -name = 'binutils' -version = '2.34' - -homepage = 'https://directory.fsf.org/project/binutils/' - -description = "binutils: GNU binary utilities" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = [ - 'binutils-2.31.1-gold-ignore-discarded-note-relocts.patch', - 'binutils-2.34-readd-avx512-vmovd.patch', -] -checksums = [ - '53537d334820be13eeb8acb326d01c7c81418772d626715c7ae927a7d401cab3', # binutils-2.34.tar.gz - # binutils-2.31.1-gold-ignore-discarded-note-relocts.patch - '17f22cc9136d0e81cfe8cbe310328c794a78a864e7fe7ca5827ee6678f65af32', - '45ecf7f5d198dd446d1a2e2a4d46b2747eb6fb8f2bfa18d7d42769e710e85716', # binutils-2.34-readd-avx512-vmovd.patch -] - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - # zlib required, but being linked in statically, so not a runtime dep - ('zlib', '1.2.11'), -] - -# avoid build failure when makeinfo command is not available -# see https://sourceware.org/bugzilla/show_bug.cgi?id=15345 -buildopts = 'MAKEINFO=true' -installopts = buildopts - -moduleclass = 'tools' diff --git a/Golden_Repo/b/binutils/binutils-2.36.1-GCCcore-10.3.0.eb b/Golden_Repo/b/binutils/binutils-2.36.1-GCCcore-10.3.0.eb deleted file mode 100644 index 64240dc322def359394c9d4e9ac80720b48cae94..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/binutils/binutils-2.36.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'binutils' -version = '2.36.1' - -homepage = 'https://directory.fsf.org/project/binutils/' -description = "binutils: GNU binary utilities" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['e68edeaaeb6ca9687b6dcbaedd1b376506baad2d48de26a885fc5ab6acb839da'] - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.7.6'), - # use same binutils version that was used when building GCC toolchain, to 'bootstrap' this binutils - ('binutils', version, '', SYSTEM) -] - -dependencies = [ - # zlib is a runtime dep to avoid that it gets embedded in libbfd.so, - # see https://github.com/easybuilders/easybuild-easyblocks/issues/1350 - ('zlib', '1.2.11'), -] - -# avoid build failure when makeinfo command is not available -# see https://sourceware.org/bugzilla/show_bug.cgi?id=15345 -buildopts = 'MAKEINFO=true' -installopts = buildopts - -moduleclass = 'tools' diff --git a/Golden_Repo/b/binutils/binutils-2.36.1.eb b/Golden_Repo/b/binutils/binutils-2.36.1.eb deleted file mode 100644 index dfa1f1f12c7f53782307418b7e4897d0afc183a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/binutils/binutils-2.36.1.eb +++ /dev/null @@ -1,28 +0,0 @@ -name = 'binutils' -version = '2.36.1' - -homepage = 
'https://directory.fsf.org/project/binutils/' - -description = "binutils: GNU binary utilities" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['e68edeaaeb6ca9687b6dcbaedd1b376506baad2d48de26a885fc5ab6acb839da'] - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.7.6'), - # zlib required, but being linked in statically, so not a runtime dep - ('zlib', '1.2.11'), -] - -# avoid build failure when makeinfo command is not available -# see https://sourceware.org/bugzilla/show_bug.cgi?id=15345 -buildopts = 'MAKEINFO=true' -installopts = buildopts - -moduleclass = 'tools' diff --git a/Golden_Repo/b/byacc/byacc-20200330-GCCcore-10.3.0.eb b/Golden_Repo/b/byacc/byacc-20200330-GCCcore-10.3.0.eb deleted file mode 100644 index eb307ad4616e6ca42c37d5486d46d6b01373fcba..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/byacc/byacc-20200330-GCCcore-10.3.0.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'byacc' -version = '20200330' - -homepage = 'http://invisible-island.net/byacc/byacc.html' -description = """Berkeley Yacc (byacc) is generally conceded to be the best yacc variant available. - In contrast to bison, it is written to avoid dependencies upon a particular compiler. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TGZ] -source_urls = ['ftp://ftp.invisible-island.net/byacc'] - -builddependencies = [('binutils', '2.36.1')] - -sanity_check_paths = { - 'files': ["bin/yacc"], - 'dirs': [] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/b/byacc/byacc-20200330-GCCcore-9.3.0.eb b/Golden_Repo/b/byacc/byacc-20200330-GCCcore-9.3.0.eb deleted file mode 100644 index 74599ac889d49bb5c6d373126bda6210c675c70c..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/byacc/byacc-20200330-GCCcore-9.3.0.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'byacc' -version = '20200330' - -homepage = 'http://invisible-island.net/byacc/byacc.html' -description = """Berkeley Yacc (byacc) is generally conceded to be the best yacc variant available. - In contrast to bison, it is written to avoid dependencies upon a particular compiler. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TGZ] -source_urls = ['ftp://ftp.invisible-island.net/byacc'] - -builddependencies = [('binutils', '2.34')] - -sanity_check_paths = { - 'files': ["bin/yacc"], - 'dirs': [] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/b/bzip2/bzip2-1.0.6-pkgconfig.patch b/Golden_Repo/b/bzip2/bzip2-1.0.6-pkgconfig.patch deleted file mode 100644 index f477e4a134b3db028d34e378a27040e8d1f30ec5..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/bzip2/bzip2-1.0.6-pkgconfig.patch +++ /dev/null @@ -1,33 +0,0 @@ -#- Adds a pkgconfig/bzip2.pc file -# -# author: Jiri Furst <jiri.furst@gmail.com> -# inspired by OpenSUSE patch by Stanislav Brabec <sbrabec@suse.cz>, see -# http://ftp.suse.com/pub/people/sbrabec/bzip2/ -diff -Nau bzip2-1.0.6.orig/bzip2.pc.in bzip2-1.0.6/bzip2.pc.in ---- bzip2-1.0.6.orig/bzip2.pc.in 1970-01-01 01:00:00.000000000 +0100 -+++ bzip2-1.0.6/bzip2.pc.in 2019-05-01 11:47:29.795517973 +0200 -@@ -0,0 +1,11 @@ -+exec_prefix=${prefix} -+bindir=${exec_prefix}/bin -+libdir=${exec_prefix}/lib -+includedir=${prefix}/include -+ -+Name: bzip2 -+Description: Lossless, block-sorting data compression -+Version: 1.0.6 -+Libs: -L${libdir} -lbz2 -+Cflags: -I${includedir} -+ -diff -Nau bzip2-1.0.6.orig/Makefile bzip2-1.0.6/Makefile ---- bzip2-1.0.6.orig/Makefile 2019-05-01 11:28:04.788206974 +0200 -+++ bzip2-1.0.6/Makefile 2019-05-01 11:46:20.911324226 +0200 -@@ -107,6 +107,9 @@ - echo ".so man1/bzgrep.1" > $(PREFIX)/man/man1/bzfgrep.1 - echo ".so man1/bzmore.1" > $(PREFIX)/man/man1/bzless.1 - echo ".so man1/bzdiff.1" > $(PREFIX)/man/man1/bzcmp.1 -+ if ( test ! -d $(PREFIX)/lib/pkgconfig ) ; then mkdir -p $(PREFIX)/lib/pkgconfig ; fi -+ echo "prefix=$(PREFIX)" > $(PREFIX)/lib/pkgconfig/bzip2.pc -+ cat bzip2.pc.in >> $(PREFIX)/lib/pkgconfig/bzip2.pc - - clean: - rm -f *.o libbz2.a bzip2 bzip2recover \ diff --git a/Golden_Repo/b/bzip2/bzip2-1.0.8-GCCcore-10.3.0.eb b/Golden_Repo/b/bzip2/bzip2-1.0.8-GCCcore-10.3.0.eb deleted file mode 100644 index 4f97fe36a4ea1880f21997096f3570eed35c3739..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/bzip2/bzip2-1.0.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'bzip2' -version = '1.0.8' - -homepage = 'http://www.bzip.org/' -description = """bzip2 is a freely available, patent free, high-quality data -compressor. It typically compresses files to within 10% to 15% of the best -available techniques (the PPM family of statistical compressors), whilst being -around twice as fast at compression and six times faster at decompression. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://sourceware.org/pub/%(name)s/'] -sources = [SOURCE_TAR_GZ] -patches = ['bzip2-1.0.6-pkgconfig.patch'] -checksums = [ - 'ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269', # bzip2-1.0.8.tar.gz - '5a823e820b332eca3684416894f58edc125ac3dace9f46e62f98e45362aa8a6d', # bzip2-1.0.6-pkgconfig.patch -] - -builddependencies = [('binutils', '2.36.1')] - -moduleclass = 'tools' diff --git a/Golden_Repo/b/bzip2/bzip2-1.0.8-GCCcore-9.3.0.eb b/Golden_Repo/b/bzip2/bzip2-1.0.8-GCCcore-9.3.0.eb deleted file mode 100644 index bf650ff72bb4fd3dd863800167e9649f8b592323..0000000000000000000000000000000000000000 --- a/Golden_Repo/b/bzip2/bzip2-1.0.8-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'bzip2' -version = '1.0.8' - -homepage = 'http://www.bzip.org/' -description = """bzip2 is a freely available, patent free, high-quality data -compressor. It typically compresses files to within 10% to 15% of the best -available techniques (the PPM family of statistical compressors), whilst being -around twice as fast at compression and six times faster at decompression. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://sourceware.org/pub/%(name)s/'] -sources = [SOURCE_TAR_GZ] -patches = ['bzip2-1.0.6-pkgconfig.patch'] -checksums = [ - 'ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269', # bzip2-1.0.8.tar.gz - '5a823e820b332eca3684416894f58edc125ac3dace9f46e62f98e45362aa8a6d', # bzip2-1.0.6-pkgconfig.patch -] - -builddependencies = [('binutils', '2.34')] - -moduleclass = 'tools' diff --git a/Golden_Repo/c/CDO/CDO-1.9.8-gpsmpi-2020.eb b/Golden_Repo/c/CDO/CDO-1.9.8-gpsmpi-2020.eb deleted file mode 100644 index a8ff27662adb03ce16b43a8bb347e4d6bb52d1a9..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CDO/CDO-1.9.8-gpsmpi-2020.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'CDO' -version = '1.9.8' - -homepage = 'https://code.zmaw.de/projects/cdo' -description = """CDO is a collection of command line Operators to manipulate and analyse Climate and NWP model Data.""" - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -# stick to lowopt (-O1) to avoid internal compiler error when building on Intel Skylake -toolchainopts = {'pic': True, 'usempi': True, 'lowopt': True} - -source_urls = ['https://code.mpimet.mpg.de/attachments/download/20826/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['f2660ac6f8bf3fa071cf2a3a196b3ec75ad007deb3a782455e80f28680c5252a'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('netCDF', '4.7.4'), - ('YAXT', '0.9.0'), - ('ecCodes', '2.18.0'), -] - - -configopts = "--with-hdf5=$EBROOTHDF5 --with-netcdf=$EBROOTNETCDF --with-eccodes=$EBROOTECCODES --with-szlib=yes" - -# fix for linking issues with HDF5 libraries for libcdi, should link with both -lnetcdf and -lhdf5_hl -lhdf5 -# prebuildopts = "find libcdi -name Makefile | xargs sed -i 's/-lnetcdf -lnetcdf/-lnetcdf -lhdf5_hl -lhdf5/g' && " -preconfigopts = 'export CFLAGS="$CFLAGS -lhdf5" && ' -sanity_check_paths = { - 'files': ['bin/cdo'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/c/CFITSIO/CFITSIO-3.48_install_test_data.patch b/Golden_Repo/c/CFITSIO/CFITSIO-3.48_install_test_data.patch deleted file mode 100644 index 
efa6eb9bc908bd69491d9a36c6fd708830423165..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CFITSIO/CFITSIO-3.48_install_test_data.patch +++ /dev/null @@ -1,27 +0,0 @@ -part of CFITSIO installation is "testprog". -Let's copy its data (testprog.tpt) into ${installdir}/share to be able use it as sanity_check_program. -Josef Dvoracek | Institute of Physics | Czech Academy of Sciences | 2019-06-10 -Bart Oldeman | Compute Canada | 2020-07-03 (adapted to CFITSIO 3.48) - -diff -Nru cfitsio-3.48.orig/Makefile.in cfitsio-3.48/Makefile.in ---- cfitsio-3.48.orig/Makefile.in 2020-04-29 15:52:34.000000000 -0000 -+++ cfitsio-3.48/Makefile.in 2020-07-03 19:45:37.974782002 -0000 -@@ -30,7 +30,9 @@ - CFITSIO_BIN = ${DESTDIR}@bindir@ - CFITSIO_LIB = ${DESTDIR}@libdir@ - CFITSIO_INCLUDE = ${DESTDIR}@includedir@ --INSTALL_DIRS = ${DESTDIR}@INSTALL_ROOT@ ${CFITSIO_INCLUDE} ${CFITSIO_LIB} ${CFITSIO_LIB}/pkgconfig -+CFITSIO_DATADIR = ${DESTDIR}@datadir@ -+ -+INSTALL_DIRS = ${DESTDIR}@INSTALL_ROOT@ ${CFITSIO_INCLUDE} ${CFITSIO_LIB} ${CFITSIO_LIB}/pkgconfig ${CFITSIO_DATADIR} - - - SHELL = /bin/sh -@@ -118,6 +120,7 @@ - fi; \ - done - /bin/cp fitsio.h fitsio2.h longnam.h drvrsmem.h ${CFITSIO_INCLUDE} -+ /bin/cp testprog.tpt ${CFITSIO_DATADIR} - /bin/cp cfitsio.pc ${CFITSIO_LIB}/pkgconfig - @for task in ${FPACK_UTILS} ${UTILS}; do \ - if [ -f $$task ]; then \ \ No newline at end of file diff --git a/Golden_Repo/c/CFITSIO/CFITSIO-3.490-GCCcore-10.3.0.eb b/Golden_Repo/c/CFITSIO/CFITSIO-3.490-GCCcore-10.3.0.eb deleted file mode 100644 index 3e79d70e920bd1bc82b558dec295256cf898eccf..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CFITSIO/CFITSIO-3.490-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'CFITSIO' -version = '3.490' - -homepage = 'http://heasarc.gsfc.nasa.gov/fitsio/' -description = """CFITSIO is a library of C and Fortran subroutines for reading and writing data files in -FITS (Flexible Image Transport System) data format. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -local_srcversion = version[:-1] -source_urls = ['http://heasarc.gsfc.nasa.gov/FTP/software/fitsio/c/'] -sources = ['%%(namelower)s-%s.tar.gz' % local_srcversion] -patches = ['CFITSIO-3.48_install_test_data.patch'] - -dependencies = [('cURL', '7.71.1')] - - -builddependencies = [ - ('binutils', '2.36.1'), -] - -# make would create just static libcfitsio.a. -# Let's create dynamic lib and testprog too. -buildopts = '&& make shared && make testprog' - -sanity_check_paths = { - 'files': ['lib/libcfitsio.a', 'lib/libcfitsio.%s' % SHLIB_EXT], - 'dirs': ['include'], -} - -sanity_check_commands = [ - ('cd %(installdir)s/share && testprog'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/c/CFITSIO/CFITSIO-3.490-GCCcore-9.3.0.eb b/Golden_Repo/c/CFITSIO/CFITSIO-3.490-GCCcore-9.3.0.eb deleted file mode 100644 index 03b26caf8dae24d8135d0535218099b4636ecd0f..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CFITSIO/CFITSIO-3.490-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'CFITSIO' -version = '3.490' - -homepage = 'http://heasarc.gsfc.nasa.gov/fitsio/' -description = """CFITSIO is a library of C and Fortran subroutines for reading and writing data files in -FITS (Flexible Image Transport System) data format. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -local_srcversion = version[:-1] -source_urls = ['http://heasarc.gsfc.nasa.gov/FTP/software/fitsio/c/'] -sources = ['%%(namelower)s-%s.tar.gz' % local_srcversion] -patches = ['CFITSIO-3.48_install_test_data.patch'] - -dependencies = [('cURL', '7.71.1')] - - -builddependencies = [ - ('binutils', '2.34'), -] - -# make would create just static libcfitsio.a. -# Let's create dynamic lib and testprog too. -buildopts = '&& make shared && make testprog' - -sanity_check_paths = { - 'files': ['lib/libcfitsio.a', 'lib/libcfitsio.%s' % SHLIB_EXT], - 'dirs': ['include'], -} - -sanity_check_commands = [ - ('cd %(installdir)s/share && testprog'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/c/CGAL/CGAL-4.13.1-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/c/CGAL/CGAL-4.13.1-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index 06d27d0c88b84c1fa2aa6ad4ba30af8c7d1c49d1..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CGAL/CGAL-4.13.1-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'CGAL' -version = '4.13.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.cgal.org/' -description = """The goal of the CGAL Open Source Project is to provide easy access to efficient - and reliable geometric algorithms in the form of a C++ library. - """ - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'strict': True} - -sources = [SOURCE_TAR_XZ] -source_urls = ['https://github.com/%(name)s/%(namelower)s/releases/download/releases/%(name)s-%(version)s/'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Eigen', '3.3.7'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('Python', '3.8.5'), - ('Boost', '1.74.0'), - ('MPFR', '4.1.0'), - ('GMP', '6.2.0'), - ('OpenGL', '2020'), - ('Qt5', '5.14.2'), -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/c/CGAL/CGAL-4.13.1-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/c/CGAL/CGAL-4.13.1-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 334d068cdce7c6d19564c37a892c8d7e964ef03c..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CGAL/CGAL-4.13.1-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,34 +0,0 @@ -name = 'CGAL' -version = '4.13.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.cgal.org/' -description = """The goal of the CGAL Open Source Project is to provide easy access to efficient - and reliable geometric algorithms in the form of a C++ library. 
- """ - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'strict': True} - -sources = [SOURCE_TAR_XZ] -source_urls = [ - 'https://github.com/%(name)s/%(namelower)s/releases/download/releases/%(name)s-%(version)s/'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Eigen', '3.3.7'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('Python', '3.8.5'), - ('Boost', '1.74.0'), - ('MPFR', '4.1.0'), - ('GMP', '6.2.0'), - ('OpenGL', '2020'), - ('Qt5', '5.14.2'), -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/c/CGAL/CGAL-5.1-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/c/CGAL/CGAL-5.1-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index f41152894321745202c8a886db6622d0604a0031..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CGAL/CGAL-5.1-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,40 +0,0 @@ -name = 'CGAL' -version = '5.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.cgal.org/' -description = """The goal of the CGAL Open Source Project is to provide easy access to efficient - and reliable geometric algorithms in the form of a C++ library. - """ - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'strict': True} - -sources = [SOURCE_TAR_XZ] -source_urls = ['https://github.com/%(name)s/%(namelower)s/releases/download/v%(version)s/'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Eigen', '3.3.7'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('Python', '3.8.5'), - ('Boost.Python', '1.74.0'), - ('MPFR', '4.1.0'), - ('GMP', '6.2.0'), - ('OpenGL', '2020'), - ('Qt5', '5.14.2'), -] - -configopts = "-DCGAL_HEADER_ONLY=OFF -DCMAKE_BUILD_TYPE=Release " -configopts += "-DOPENGL_INCLUDE_DIR=$EBROOTOPENGL/include\; " -configopts += "-DOPENGL_gl_LIBRARY=$EBROOTOPENGL/lib/libGL.%s " % SHLIB_EXT -configopts += "-DOPENGL_glu_LIBRARY=$EBROOTOPENGL/lib/libGLU.%s " % SHLIB_EXT -configopts += "-DWITH_ZLIB=ON -DWITH_MPFR=ON -DWITH_OpenGL=ON -DWITH_Eigen3=ON " -configopts += "-DWITH_GMPXX=ON " - -moduleclass = 'numlib' diff --git a/Golden_Repo/c/CGAL/CGAL-5.1-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/c/CGAL/CGAL-5.1-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 770003f00dd3ba43c16e3915443e9cb5cd686de3..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CGAL/CGAL-5.1-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -name = 'CGAL' -version = '5.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.cgal.org/' -description = """The goal of the CGAL Open Source Project is to provide easy access to efficient - and reliable geometric algorithms in the form of a C++ library. 
- """ - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'strict': True} - -sources = [SOURCE_TAR_XZ] -source_urls = [ - 'https://github.com/%(name)s/%(namelower)s/releases/download/v%(version)s/'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Eigen', '3.3.7'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('Python', '3.8.5'), - ('Boost.Python', '1.74.0'), - ('MPFR', '4.1.0'), - ('GMP', '6.2.0'), - ('OpenGL', '2020'), - ('Qt5', '5.14.2'), -] - -configopts = "-DCGAL_HEADER_ONLY=OFF -DCMAKE_BUILD_TYPE=Release " -configopts += "-DOPENGL_INCLUDE_DIR=$EBROOTOPENGL/include\; " -configopts += "-DOPENGL_gl_LIBRARY=$EBROOTOPENGL/lib/libGL.%s " % SHLIB_EXT -configopts += "-DOPENGL_glu_LIBRARY=$EBROOTOPENGL/lib/libGLU.%s " % SHLIB_EXT -configopts += "-DWITH_ZLIB=ON -DWITH_MPFR=ON -DWITH_OpenGL=ON -DWITH_Eigen3=ON " -configopts += "-DWITH_GMPXX=ON " - -moduleclass = 'numlib' diff --git a/Golden_Repo/c/CGNS/CGNS-4.1.1-gpsmpi-2020.eb b/Golden_Repo/c/CGNS/CGNS-4.1.1-gpsmpi-2020.eb deleted file mode 100644 index c3b439f031be2ff4574746675416fee6b253bd18..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CGNS/CGNS-4.1.1-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -# eb for CGNS -easyblock = 'CMakeMake' - -name = 'CGNS' -version = '4.1.1' - -homepage = 'https://cgns.github.io/' -description = """The CGNS system is designed to facilitate the exchange -of data between sites and applications, and to help stabilize the archiving -of aerodynamic data.""" - -site_contacts = 's.koh@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = ['https://github.com/CGNS/CGNS/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['055d345c3569df3ae832fb2611cd7e0bc61d56da41b2be1533407e949581e226'] - -dependencies = [ - ('HDF5', '1.10.6'), -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["cgnscheck", "cgnscompress", - "cgnsconvert", "cgnsdiff", "cgnslist", "cgnsnames", - "cgnsupdate"]], - 'dirs': [], -} - -moduleclass = 'cae' diff --git a/Golden_Repo/c/CMake/CMake-3.18.0-GCCcore-9.3.0.eb b/Golden_Repo/c/CMake/CMake-3.18.0-GCCcore-9.3.0.eb deleted file mode 100644 index e5773531de384da51a84195976ebd868bf53d7e7..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CMake/CMake-3.18.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'CMake' -version = '3.18.0' - -homepage = 'http://www.cmake.org' -description = """CMake, the cross-platform, open-source build system. - CMake is a family of tools designed to build, test and package software. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://www.cmake.org/files/v%(version_major_minor)s'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = '-- -DCMAKE_USE_OPENSSL=1 -DCMAKE_PREFIX_PATH=$EBROOTNCURSES' - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('ncurses', '6.2'), - # OS dependency should be preferred if the os version is more recent then this version, - # it's nice to have an up to date openssl for security reasons - # ('OpenSSL', '1.0.1p'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ['cmake', 'cpack', 'ctest']], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/c/CMake/CMake-3.18.0.eb b/Golden_Repo/c/CMake/CMake-3.18.0.eb deleted file mode 100644 index 71cfad55d538d6ed6f1d981b4d20f11aa74be75f..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CMake/CMake-3.18.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'CMake' -version = '3.18.0' - -homepage = 'http://www.cmake.org' -description = """CMake, the cross-platform, open-source build system. - CMake is a family of tools designed to build, test and package software. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['http://www.cmake.org/files/v%(version_major_minor)s'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = '-- -DCMAKE_USE_OPENSSL=1 -DCMAKE_PREFIX_PATH=$EBROOTNCURSES' - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('ncurses', '6.2'), - # OS dependency should be preferred if the os version is more recent then this version, - # it's nice to have an up to date openssl for security reasons - # ('OpenSSL', '1.0.1p'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ['cmake', 'cpack', 'ctest']], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/c/CP2K/CP2K-7.1.0-intel-para-2020.eb b/Golden_Repo/c/CP2K/CP2K-7.1.0-intel-para-2020.eb deleted file mode 100644 index 5796f5b2cd67faba9fcdd114095dae1ac2908d46..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CP2K/CP2K-7.1.0-intel-para-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -name = 'CP2K' -version = '7.1.0' - -homepage = 'http://www.cp2k.org/' -description = """CP2K is a freely available (GPL) program, written in Fortran 95, to perform atomistic and molecular - simulations of solid state, liquid, molecular and biological systems. It provides a general framework for different - methods such as e.g. density functional theory (DFT) using a mixed Gaussian and plane waves approach (GPW), and - classical pair and many-body potentials. 
-""" - -site_contacts = 'th.mueller@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'pic': True} - -local_dbcsr_version = '2.0.1' - -sources = [ - 'v%(version)s.tar.gz', - 'v%s.tar.gz' % local_dbcsr_version, -] -source_urls = [ - 'https://github.com/cp2k/cp2k/archive/', - 'https://github.com/cp2k/dbcsr/archive/' -] - -patches = [ - 'CP2K-7.1_fftw3_lib.patch', -] - -dependencies = [ - ('Libint', '2.7.0-beta.6', '_cp2k_lmax5'), - ('libxc', '4.3.4'), - ('PLUMED', '2.6.1'), - ('ELPA', '2020.05.001'), - ('libxsmm', '1.16.1'), - ('FFTW', '3.3.8'), -] - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), -] - -# Add PLUMED support -plumed = True - -# Disable CUDA -cuda = False - -# explicit unrolled loops up to __MAX_CONTR, 4 gives excessive compiler times -configopts = '-D__MAX_CONTR=3' - -# popt or psmp -type = 'psmp' - -# run tests separately (2 nodes of juwels approx 1 hour) -runtest = False - -# regression test reports failures -ignore_regtest_fails = False - -modextravars = { - 'CP2K_DATA_DIR': '%(installdir)s/data', -} - -moduleclass = 'chem' diff --git a/Golden_Repo/c/CP2K/CP2K-7.1_fftw3_lib.patch b/Golden_Repo/c/CP2K/CP2K-7.1_fftw3_lib.patch deleted file mode 100644 index 9774de4f11589628db6ef6e7d8de3e4e0cc343e0..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CP2K/CP2K-7.1_fftw3_lib.patch +++ /dev/null @@ -1,18 +0,0 @@ ---- src/pw/fft/fftw3_lib.F_orig 2020-01-24 11:26:42.751462151 +0100 -+++ src/pw/fft/fftw3_lib.F 2020-01-24 11:26:08.503738878 +0100 -@@ -119,6 +119,7 @@ - - ! Now check if we have a real FFTW3 library, or are using MKL wrappers - -+#if defined (__MKL) && defined (__FFTW3) - !$ IF (fftw3_is_mkl_wrapper() .and. omp_get_max_threads() .gt. 1) THEN - ! If we are not using the Intel compiler, there is no way to tell which - ! MKL version is in use, so fail safe... -@@ -145,6 +146,7 @@ - !$ "Now exiting..." - !$ ENDIF - !$ ENDIF -+#endif - #else - MARK_USED(wisdom_file) - #endif diff --git a/Golden_Repo/c/CPMD/CPMD-4.3-intel-2020.eb b/Golden_Repo/c/CPMD/CPMD-4.3-intel-2020.eb deleted file mode 100644 index 57b135adaa51f92b586b91fba94064c06c0ddac0..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CPMD/CPMD-4.3-intel-2020.eb +++ /dev/null @@ -1,48 +0,0 @@ -name = 'CPMD' -version = '4.3' - -homepage = 'http://cpmd.org' -description = """The CPMD code is a parallelized plane wave / pseudopotential -implementation of Density Functional Theory, particularly designed for -ab-initio molecular dynamics. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'usempi': True} - -# This package requires registration prior to download. Having registered, -# you can download the source code from http://cpmd.org/download, then put -# it in your local sources directory. 
-sources = [ - '%(namelower)s-v%(version)s.tar.gz', - 'pseudo-extlib.tar.gz', - 'pseudo_std.tar.gz', - 'pseudo_vdb.tar.gz' -] - -# These patches are on the source directory, not on the git repo, as they come from CPMD -patches = [ - '%(namelower)s-v%(version)s-4612.patch', - '%(namelower)s-v%(version)s-4615.patch', - '%(namelower)s-v%(version)s-4616.patch', - '%(namelower)s-v%(version)s-4621.patch', - '%(namelower)s-v%(version)s-4624.patch', - 'cppflags.patch' -] - -prefix_opt = '-DEST=' - -group = "cpmd" - -sanity_check_paths = { - 'files': ['bin/cpmd.x', 'lib/libcpmd.a'], - 'dirs': ['bin', 'lib'], -} - -modloadmsg = 'MPI-Version: cpmd.x \n' -modloadmsg += '\n' -modloadmsg += 'NOTE: This software is restricted to members of the group cpmd\n' - -moduleclass = 'chem' diff --git a/Golden_Repo/c/CPMD/CPMD-4.3-intel-para-2020.eb b/Golden_Repo/c/CPMD/CPMD-4.3-intel-para-2020.eb deleted file mode 100644 index 8ffdd5ce0bab38deff61c29e1ba324241d61ed8b..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CPMD/CPMD-4.3-intel-para-2020.eb +++ /dev/null @@ -1,48 +0,0 @@ -name = 'CPMD' -version = '4.3' - -homepage = 'http://cpmd.org' -description = """The CPMD code is a parallelized plane wave / pseudopotential -implementation of Density Functional Theory, particularly designed for -ab-initio molecular dynamics. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True} - -# This package requires registration prior to download. Having registered, -# you can download the source code from http://cpmd.org/download, then put -# it in your local sources directory. -sources = [ - '%(namelower)s-v%(version)s.tar.gz', - 'pseudo-extlib.tar.gz', - 'pseudo_std.tar.gz', - 'pseudo_vdb.tar.gz' -] - -# These patches are on the source directory, not on the git repo, as they come from CPMD -patches = [ - '%(namelower)s-v%(version)s-4612.patch', - '%(namelower)s-v%(version)s-4615.patch', - '%(namelower)s-v%(version)s-4616.patch', - '%(namelower)s-v%(version)s-4621.patch', - '%(namelower)s-v%(version)s-4624.patch', - 'cppflags.patch' -] - -prefix_opt = '-DEST=' - -group = "cpmd" - -sanity_check_paths = { - 'files': ['bin/cpmd.x', 'lib/libcpmd.a'], - 'dirs': ['bin', 'lib'], -} - -modloadmsg = 'MPI-Version: cpmd.x \n' -modloadmsg += '\n' -modloadmsg += 'NOTE: This software is restricted to members of the group cpmd\n' - -moduleclass = 'chem' diff --git a/Golden_Repo/c/CPMD/CPMD-4.3-iomkl-2020.eb b/Golden_Repo/c/CPMD/CPMD-4.3-iomkl-2020.eb deleted file mode 100644 index 2051f2cbb4fcca85c059ae375e20c8a8c993b6cf..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CPMD/CPMD-4.3-iomkl-2020.eb +++ /dev/null @@ -1,48 +0,0 @@ -name = 'CPMD' -version = '4.3' - -homepage = 'http://cpmd.org' -description = """The CPMD code is a parallelized plane wave / pseudopotential -implementation of Density Functional Theory, particularly designed for -ab-initio molecular dynamics. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -# This package requires registration prior to download. Having registered, -# you can download the source code from http://cpmd.org/download, then put -# it in your local sources directory. 
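# Illustrative sketch only (not EasyBuild code): what the 'sanity_check_paths' block
# further below in this easyconfig effectively asserts after the install step --
# every listed file must exist under the install prefix. Paths here are hypothetical.
import os

installdir = '/opt/software/CPMD/4.3'                 # hypothetical install prefix
required_files = ['bin/cpmd.x', 'lib/libcpmd.a']      # taken from sanity_check_paths

missing = [f for f in required_files
           if not os.path.isfile(os.path.join(installdir, f))]
if missing:
    raise RuntimeError('sanity check failed, missing: %s' % ', '.join(missing))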
-sources = [ - '%(namelower)s-v%(version)s.tar.gz', - 'pseudo-extlib.tar.gz', - 'pseudo_std.tar.gz', - 'pseudo_vdb.tar.gz' -] - -# These patches are on the source directory, not on the git repo, as they come from CPMD -patches = [ - '%(namelower)s-v%(version)s-4612.patch', - '%(namelower)s-v%(version)s-4615.patch', - '%(namelower)s-v%(version)s-4616.patch', - '%(namelower)s-v%(version)s-4621.patch', - '%(namelower)s-v%(version)s-4624.patch', - 'cppflags.patch' -] - -prefix_opt = '-DEST=' - -group = "cpmd" - -sanity_check_paths = { - 'files': ['bin/cpmd.x', 'lib/libcpmd.a'], - 'dirs': ['bin', 'lib'], -} - -modloadmsg = 'MPI-Version: cpmd.x \n' -modloadmsg += '\n' -modloadmsg += 'NOTE: This software is restricted to members of the group cpmd\n' - -moduleclass = 'chem' diff --git a/Golden_Repo/c/CPMD/cppflags.patch b/Golden_Repo/c/CPMD/cppflags.patch deleted file mode 100644 index bfec237c09f06a6cccdfa13289b32180dd6aa07b..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CPMD/cppflags.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff -ruN CPMD/configure/LINUX-X86_64-INTEL-MPI-FFTW CPMD.patched/configure/LINUX-X86_64-INTEL-MPI-FFTW ---- CPMD/configure/LINUX-X86_64-INTEL-MPI-FFTW 2018-12-20 15:13:34.000000000 +0100 -+++ CPMD.patched/configure/LINUX-X86_64-INTEL-MPI-FFTW 2019-04-09 20:47:20.366017966 +0200 -@@ -17,8 +17,7 @@ - FFLAGS_GROMOS='$(FFLAGS) -fixed' - FFLAGS_GROMOS_MODULES='$(FFLAGS)' - CPP='/usr/bin/cpp -P -C -traditional' -- CPPFLAGS='-D__Linux -D__HAS_FFT_FFTW3 -D__PARALLEL -DLINUX_IFC -D__HASNT_OMP_45 \ -- -D__HASNT_F03_EXECUTE_COMMAND_LINE -D__HASNT_F08_ISO_FORTRAN_ENV -D_HASNT_MPI_30' -+ CPPFLAGS='-D__Linux -D__HAS_FFT_FFTW3 -D__PARALLEL -DLINUX_IFC -D__HASNT_OMP_45 -D__HASNT_F03_EXECUTE_COMMAND_LINE -D__HASNT_F08_ISO_FORTRAN_ENV -D_HASNT_MPI_30' - NOOPT_FLAG=' -O1 ' - NOOPT_OBJS=' jrotation_utils.mod.o ' - AR='/usr/bin/ar ruv' diff --git a/Golden_Repo/c/CUDA/CUDA-11.0.eb b/Golden_Repo/c/CUDA/CUDA-11.0.eb deleted file mode 100644 index fd208bb5e12b445fbb59208863b00fcc55843445..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CUDA/CUDA-11.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -name = 'CUDA' -version = '11.0' -local_complete_version = '%(version)s.3' - -homepage = 'https://developer.nvidia.com/cuda-toolkit' -description = """CUDA (formerly Compute Unified Device Architecture) is a parallel - computing platform and programming model created by NVIDIA and implemented by the - graphics processing units (GPUs) that they produce. CUDA gives developers access - to the virtual instruction set and memory of the parallel computational elements - in CUDA GPUs. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [ - 'http://developer.download.nvidia.com/compute/cuda/%(version_major_minor)s/Prod/local_installers/', - 'https://developer.nvidia.com/compute/cuda/%(version_major_minor)s/prod/local_installers/', - 'https://developer.nvidia.com/compute/cuda/%(version_major_minor)s/Prod2/local_installers/', -] - -sources = [ - '%%(namelower)s_%s_450.51.06_linux.run' % local_complete_version, -] - -dependencies = [ - ('nvidia-driver', 'default', '', SYSTEM), -] - -installopts = '--samplespath=%(installdir)s --samples' - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'system' diff --git a/Golden_Repo/c/CUDA/CUDA-11.3.eb b/Golden_Repo/c/CUDA/CUDA-11.3.eb deleted file mode 100644 index 271ba75fb9b450776f073f4fceff2b4590793394..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CUDA/CUDA-11.3.eb +++ /dev/null @@ -1,38 +0,0 @@ -name = 'CUDA' -version = '11.3' -local_complete_version = '%(version)s.1' - -homepage = 'https://developer.nvidia.com/cuda-toolkit' -description = """CUDA (formerly Compute Unified Device Architecture) is a parallel - computing platform and programming model created by NVIDIA and implemented by the - graphics processing units (GPUs) that they produce. CUDA gives developers access - to the virtual instruction set and memory of the parallel computational elements - in CUDA GPUs. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [ - 'https://developer.download.nvidia.com/compute/cuda/%(local_complete_version)s/local_installers/' - 'https://developer.download.nvidia.com/compute/cuda/%(version_major_minor)s/Prod/local_installers/', - 'https://developer.nvidia.com/compute/cuda/%(version_major_minor)s/prod/local_installers/', - 'https://developer.nvidia.com/compute/cuda/%(version_major_minor)s/Prod2/local_installers/', -] -sources = [ - '%%(namelower)s_%s_465.19.01_linux.run' % local_complete_version, -] -checksums = ['ad93ea98efced35855c58d3a0fc326377c60917cb3e8c017d3e6d88819bf2934'] - -dependencies = [ - ('nvidia-driver', 'default', '', SYSTEM), -] - -installopts = '--samplespath=%(installdir)s --samples' - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'system' diff --git a/Golden_Repo/c/CVS/CVS-1.11.23-GCCcore-10.3.0.eb b/Golden_Repo/c/CVS/CVS-1.11.23-GCCcore-10.3.0.eb deleted file mode 100644 index e6517f6e33448ee77f1591e14d5e857917a71fb6..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CVS/CVS-1.11.23-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -## - -easyblock = 'ConfigureMake' - -name = 'CVS' -version = '1.11.23' - -homepage = 'https://savannah.nongnu.org/projects/cvs' -description = """CVS is a version control system, an important component of - Source Configuration Management (SCM). 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [' http://ftp.gnu.org/non-gnu/cvs/source/stable/%(version)s/'] -sources = [SOURCELOWER_TAR_BZ2] - -patches = ['CVS-1.11.23-zlib-1.patch', 'CVS-1.11.23-getline.patch'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('zlib', '1.2.11') -] - -sanity_check_paths = { - 'files': ['bin/cvs', 'bin/cvsbug', 'bin/rcs2log'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/c/CVS/CVS-1.11.23-GCCcore-9.3.0.eb b/Golden_Repo/c/CVS/CVS-1.11.23-GCCcore-9.3.0.eb deleted file mode 100644 index 5b91d81c4277d1519b1e442c6eea998b23871fe5..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CVS/CVS-1.11.23-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -## - -easyblock = 'ConfigureMake' - -name = 'CVS' -version = '1.11.23' - -homepage = 'https://savannah.nongnu.org/projects/cvs' -description = """CVS is a version control system, an important component of - Source Configuration Management (SCM). -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [' http://ftp.gnu.org/non-gnu/cvs/source/stable/%(version)s/'] -sources = [SOURCELOWER_TAR_BZ2] - -patches = ['CVS-1.11.23-zlib-1.patch', 'CVS-1.11.23-getline.patch'] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('zlib', '1.2.11') -] - -sanity_check_paths = { - 'files': ['bin/cvs', 'bin/cvsbug', 'bin/rcs2log'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/c/CVS/CVS-1.11.23-getline.patch b/Golden_Repo/c/CVS/CVS-1.11.23-getline.patch deleted file mode 100644 index 3d46025fe157cdb56f3af60344890b573a38759c..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CVS/CVS-1.11.23-getline.patch +++ /dev/null @@ -1,39 +0,0 @@ -diff -rup cvs-1.11.23.orig/doc/cvs.texinfo cvs-1.11.23/doc/cvs.texinfo ---- cvs-1.11.23.orig/doc/cvs.texinfo 2014-03-20 14:09:54.695168055 +0100 -+++ cvs-1.11.23/doc/cvs.texinfo 2014-03-20 14:10:09.206280539 +0100 -@@ -111,9 +111,9 @@ approved by the Free Software Foundation - @sp 4 - @comment The title is printed in a large font. 
- @center @titlefont{Version Management} --@sp -+@sp 1 - @center @titlefont{with} --@sp -+@sp 1 - @center @titlefont{CVS} - @sp 2 - @center for @sc{cvs} @value{VERSION} -diff -rup cvs-1.11.23.orig/lib/getline.c cvs-1.11.23/lib/getline.c ---- cvs-1.11.23.orig/lib/getline.c 2014-03-20 14:09:54.682167954 +0100 -+++ cvs-1.11.23/lib/getline.c 2014-03-20 14:10:09.175280299 +0100 -@@ -155,7 +155,7 @@ getstr (lineptr, n, stream, terminator, - } - - int --getline (lineptr, n, stream) -+get_line (lineptr, n, stream) - char **lineptr; - size_t *n; - FILE *stream; -diff -rup cvs-1.11.23.orig/lib/getline.h cvs-1.11.23/lib/getline.h ---- cvs-1.11.23.orig/lib/getline.h 2014-03-20 14:09:54.681167947 +0100 -+++ cvs-1.11.23/lib/getline.h 2014-03-20 14:10:09.176280306 +0100 -@@ -12,7 +12,7 @@ - #define GETLINE_NO_LIMIT -1 - - int -- getline __PROTO ((char **_lineptr, size_t *_n, FILE *_stream)); -+ get_line __PROTO ((char **_lineptr, size_t *_n, FILE *_stream)); - int - getline_safe __PROTO ((char **_lineptr, size_t *_n, FILE *_stream, - int limit)); diff --git a/Golden_Repo/c/CVS/CVS-1.11.23-zlib-1.patch b/Golden_Repo/c/CVS/CVS-1.11.23-zlib-1.patch deleted file mode 100644 index 869a3645ed3375e8faeb4ecc5cb90a63b7e97d6e..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CVS/CVS-1.11.23-zlib-1.patch +++ /dev/null @@ -1,53 +0,0 @@ -Submitted By: Matthew Burgess <matthew@linuxfromscratch.org> -Date: 2004-11-12 -Initial Package Version: 1.11.2 -Upstream Status: Not submitted -Origin: BLFS Dev Post -Description: Links against system zlib instead of the - internal zlib. - -$LastChangedBy: igor $ -$Date: 2005-10-28 09:20:09 -0500 (Fri, 28 Oct 2005) $ - -diff -Naur cvs-1.11.18.orig/src/Makefile.in cvs-1.11.18/src/Makefile.in ---- cvs-1.11.18.orig/src/Makefile.in 2004-11-11 18:17:20.000000000 +0000 -+++ cvs-1.11.18/src/Makefile.in 2004-11-12 19:58:14.962293296 +0000 -@@ -86,7 +86,7 @@ - version.$(OBJEXT) vers_ts.$(OBJEXT) watch.$(OBJEXT) \ - wrapper.$(OBJEXT) zlib.$(OBJEXT) - cvs_OBJECTS = $(am_cvs_OBJECTS) --cvs_DEPENDENCIES = ../diff/libdiff.a ../lib/libcvs.a ../zlib/libz.a -+cvs_DEPENDENCIES = ../diff/libdiff.a ../lib/libcvs.a - binSCRIPT_INSTALL = $(INSTALL_SCRIPT) - SCRIPTS = $(bin_SCRIPTS) - DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir) -@@ -199,7 +199,7 @@ - # some namespace hackery going on that maybe shouldn't be. Long term fix is to - # try and remove naming ocnflicts and fix Automake to allow particular includes - # to be attached only to particular object files. Short term fix is either or. --INCLUDES = -I$(top_srcdir)/lib -I$(top_srcdir)/diff -I$(top_srcdir)/zlib $(includeopt) -+INCLUDES = -I$(top_srcdir)/lib -I$(top_srcdir)/diff $(includeopt) - bin_SCRIPTS = cvsbug - - # The cvs executable -@@ -278,7 +278,7 @@ - cvs_LDADD = \ - ../diff/libdiff.a \ - ../lib/libcvs.a \ -- ../zlib/libz.a -+ -lz - - - # extra clean targets -diff -Naur cvs-1.11.18.orig/src/zlib.c cvs-1.11.18/src/zlib.c ---- cvs-1.11.18.orig/src/zlib.c 2004-03-19 19:18:57.000000000 +0000 -+++ cvs-1.11.18/src/zlib.c 2004-11-12 19:58:55.531125896 +0000 -@@ -22,7 +22,7 @@ - - #if defined (SERVER_SUPPORT) || defined (CLIENT_SUPPORT) - --#include "zlib.h" -+#include <zlib.h> - - /* OS/2 doesn't have EIO. FIXME: this whole notion of turning - a different error into EIO strikes me as pretty dubious. 
*/ diff --git a/Golden_Repo/c/CVXOPT/CVXOPT-1.2.1-fix-setup-py.patch b/Golden_Repo/c/CVXOPT/CVXOPT-1.2.1-fix-setup-py.patch deleted file mode 100644 index 5975c4bb3289692195062ef9c552f1b6d2373f75..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CVXOPT/CVXOPT-1.2.1-fix-setup-py.patch +++ /dev/null @@ -1,128 +0,0 @@ -# Patches the setup.py to use EB settings for BLAS/LAPACK, FFTW, etc -# original by wpoely86@gmail.com, ported to v1.2.1 by Kenneth Hoste (HPC-UGent) ---- cvxopt-1.2.1/setup.py.orig 2018-08-30 19:54:12.000000000 +0200 -+++ cvxopt-1.2.1/setup.py 2018-10-02 16:28:57.252340779 +0200 -@@ -91,9 +91,11 @@ - LAPACK_LIB = os.environ.get("CVXOPT_LAPACK_LIB",LAPACK_LIB) - BLAS_LIB_DIR = os.environ.get("CVXOPT_BLAS_LIB_DIR",BLAS_LIB_DIR) - BLAS_EXTRA_LINK_ARGS = os.environ.get("CVXOPT_BLAS_EXTRA_LINK_ARGS",BLAS_EXTRA_LINK_ARGS) -+FFTW_EXTRA_LINK_ARGS = os.environ.get("CVXOPT_FFTW_EXTRA_LINK_ARGS",'') - if type(BLAS_LIB) is str: BLAS_LIB = BLAS_LIB.strip().split(';') - if type(LAPACK_LIB) is str: LAPACK_LIB = LAPACK_LIB.strip().split(';') --if type(BLAS_EXTRA_LINK_ARGS) is str: BLAS_EXTRA_LINK_ARGS = BLAS_EXTRA_LINK_ARGS.strip().split(';') -+if type(BLAS_EXTRA_LINK_ARGS) is str: BLAS_EXTRA_LINK_ARGS = BLAS_EXTRA_LINK_ARGS.strip().split(' ') -+if type(FFTW_EXTRA_LINK_ARGS) is str: FFTW_EXTRA_LINK_ARGS = FFTW_EXTRA_LINK_ARGS.strip().split(' ') - BUILD_GSL = int(os.environ.get("CVXOPT_BUILD_GSL",BUILD_GSL)) - GSL_LIB_DIR = os.environ.get("CVXOPT_GSL_LIB_DIR",GSL_LIB_DIR) - GSL_INC_DIR = os.environ.get("CVXOPT_GSL_INC_DIR",GSL_INC_DIR) -@@ -126,7 +128,7 @@ - # optional modules - - if BUILD_GSL: -- gsl = Extension('gsl', libraries = M_LIB + ['gsl'] + BLAS_LIB, -+ gsl = Extension('gsl', libraries = M_LIB + ['gsl'], - include_dirs = [ GSL_INC_DIR ], - library_dirs = [ GSL_LIB_DIR, BLAS_LIB_DIR ], - define_macros = GSL_MACROS, -@@ -135,11 +137,11 @@ - extmods += [gsl]; - - if BUILD_FFTW: -- fftw = Extension('fftw', libraries = ['fftw3'] + BLAS_LIB, -+ fftw = Extension('fftw', - include_dirs = [ FFTW_INC_DIR ], - library_dirs = [ FFTW_LIB_DIR, BLAS_LIB_DIR ], - define_macros = FFTW_MACROS, -- extra_link_args = BLAS_EXTRA_LINK_ARGS, -+ extra_link_args = BLAS_EXTRA_LINK_ARGS + FFTW_EXTRA_LINK_ARGS, - sources = ['src/C/fftw.c'] ) - extmods += [fftw]; - -@@ -151,7 +153,7 @@ - extmods += [glpk]; - - if BUILD_DSDP: -- dsdp = Extension('dsdp', libraries = ['dsdp'] + LAPACK_LIB + BLAS_LIB, -+ dsdp = Extension('dsdp', libraries = ['dsdp'], - include_dirs = [ DSDP_INC_DIR ], - library_dirs = [ DSDP_LIB_DIR, BLAS_LIB_DIR ], - extra_link_args = BLAS_EXTRA_LINK_ARGS, -@@ -160,19 +162,19 @@ - - # Required modules - --base = Extension('base', libraries = M_LIB + LAPACK_LIB + BLAS_LIB, -+base = Extension('base', - library_dirs = [ BLAS_LIB_DIR ], - define_macros = MACROS, - extra_link_args = BLAS_EXTRA_LINK_ARGS, - sources = ['src/C/base.c','src/C/dense.c','src/C/sparse.c']) - --blas = Extension('blas', libraries = BLAS_LIB, -+blas = Extension('blas', - library_dirs = [ BLAS_LIB_DIR ], - define_macros = MACROS, - extra_link_args = BLAS_EXTRA_LINK_ARGS, - sources = ['src/C/blas.c'] ) - --lapack = Extension('lapack', libraries = LAPACK_LIB + BLAS_LIB, -+lapack = Extension('lapack', - library_dirs = [ BLAS_LIB_DIR ], - define_macros = MACROS, - extra_link_args = BLAS_EXTRA_LINK_ARGS, -@@ -180,9 +182,10 @@ - - if not SUITESPARSE_SRC_DIR: - umfpack = Extension('umfpack', -- libraries = ['umfpack','cholmod','amd','colamd','suitesparseconfig'] + LAPACK_LIB + BLAS_LIB + RT_LIB, -+ libraries = 
['umfpack','cholmod','amd','colamd','suitesparseconfig'] + RT_LIB, - include_dirs = [SUITESPARSE_INC_DIR], - library_dirs = [SUITESPARSE_LIB_DIR, BLAS_LIB_DIR], -+ extra_link_args = BLAS_EXTRA_LINK_ARGS, - sources = ['src/C/umfpack.c']) - else: - umfpack = Extension('umfpack', -@@ -193,7 +196,6 @@ - SUITESPARSE_SRC_DIR + '/SuiteSparse_config' ], - library_dirs = [ BLAS_LIB_DIR ], - define_macros = MACROS + [('NTIMER', '1'), ('NCHOLMOD', '1')], -- libraries = LAPACK_LIB + BLAS_LIB, - extra_compile_args = UMFPACK_EXTRA_COMPILE_ARGS, - extra_link_args = BLAS_EXTRA_LINK_ARGS, - sources = [ 'src/C/umfpack.c', -@@ -206,14 +208,13 @@ - - if not SUITESPARSE_SRC_DIR: - cholmod = Extension('cholmod', -- libraries = ['cholmod','colamd','amd','suitesparseconfig'] + LAPACK_LIB + BLAS_LIB + RT_LIB, -+ libraries = ['cholmod','colamd','amd','suitesparseconfig'] + RT_LIB, - include_dirs = [SUITESPARSE_INC_DIR], - library_dirs = [SUITESPARSE_LIB_DIR, BLAS_LIB_DIR], - sources = [ 'src/C/cholmod.c' ]) - else: - cholmod = Extension('cholmod', - library_dirs = [ BLAS_LIB_DIR ], -- libraries = LAPACK_LIB + BLAS_LIB, - include_dirs = [ SUITESPARSE_SRC_DIR + '/CHOLMOD/Include', - SUITESPARSE_SRC_DIR + '/COLAMD', - SUITESPARSE_SRC_DIR + '/AMD/Include', -@@ -235,17 +236,18 @@ - libraries = ['amd','suitesparseconfig'] + RT_LIB, - include_dirs = [SUITESPARSE_INC_DIR], - library_dirs = [SUITESPARSE_LIB_DIR], -+ extra_link_args = BLAS_EXTRA_LINK_ARGS, - sources = ['src/C/amd.c']) - else: - amd = Extension('amd', - include_dirs = [SUITESPARSE_SRC_DIR + '/AMD/Include', - SUITESPARSE_SRC_DIR + '/SuiteSparse_config' ], - define_macros = MACROS + [('NTIMER', '1')], -+ extra_link_args = BLAS_EXTRA_LINK_ARGS, - sources = [ 'src/C/amd.c', SUITESPARSE_SRC_DIR + '/SuiteSparse_config/SuiteSparse_config.c'] + - glob(SUITESPARSE_SRC_DIR + '/AMD/Source/*.c') ) - - misc_solvers = Extension('misc_solvers', -- libraries = LAPACK_LIB + BLAS_LIB, - library_dirs = [ BLAS_LIB_DIR ], - define_macros = MACROS, - extra_link_args = BLAS_EXTRA_LINK_ARGS, diff --git a/Golden_Repo/c/CVXOPT/CVXOPT-1.2.5-gpsmkl-2020-Python-3.8.5.eb b/Golden_Repo/c/CVXOPT/CVXOPT-1.2.5-gpsmkl-2020-Python-3.8.5.eb deleted file mode 100644 index 59c5d190ee45a32124b9c7c25bb585ddb3210892..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CVXOPT/CVXOPT-1.2.5-gpsmkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'CVXOPT' -version = '1.2.5' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://cvxopt.org' -description = """CVXOPT is a free software package for convex optimization based on the Python programming language. - Its main purpose is to make the development of software for convex optimization applications straightforward by - building on Python's extensive standard library and on the strengths of Python as a high-level programming language. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -patches = ['CVXOPT-1.2.1-fix-setup-py.patch'] -checksums = [ - '94ec8c36bd6628a11de9014346692daeeef99b3b7bae28cef30c7490bbcb2d72', # cvxopt-1.2.5.tar.gz - '85d8475098895e9af45f330489a712b5b944489c5fb4a6c67f59bef8fed4303d', # CVXOPT-1.2.1-fix-setup-py.patch -] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SuiteSparse', '5.7.1', '-CUDA'), - ('GSL', '2.6'), -] - -download_dep_fail = True -use_pip = True - -preinstallopts = 'CVXOPT_BUILD_FFTW=1 CVXOPT_BUILD_GSL=1 CVXOPT_BLAS_EXTRA_LINK_ARGS="$LIBLAPACK" ' -preinstallopts += 'CVXOPT_FFTW_EXTRA_LINK_ARGS="$LIBFFT" CVXOPT_SUITESPARSE_SRC_DIR=$EBROOTSUITESPARSE' - -installopts = ' --no-binary cvxopt' - -sanity_check_commands = ['nosetests'] - -moduleclass = 'math' diff --git a/Golden_Repo/c/CVXOPT/CVXOPT-1.2.5-gpsmkl-2021-Python-3.8.5.eb b/Golden_Repo/c/CVXOPT/CVXOPT-1.2.5-gpsmkl-2021-Python-3.8.5.eb deleted file mode 100644 index 6e069a2f060bd810de75fd4868431329c83af2e8..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CVXOPT/CVXOPT-1.2.5-gpsmkl-2021-Python-3.8.5.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'CVXOPT' -version = '1.2.5' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://cvxopt.org' -description = """CVXOPT is a free software package for convex optimization based on the Python programming language. - Its main purpose is to make the development of software for convex optimization applications straightforward by - building on Python's extensive standard library and on the strengths of Python as a high-level programming language. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -patches = ['CVXOPT-1.2.1-fix-setup-py.patch'] -checksums = [ - '94ec8c36bd6628a11de9014346692daeeef99b3b7bae28cef30c7490bbcb2d72', # cvxopt-1.2.5.tar.gz - '85d8475098895e9af45f330489a712b5b944489c5fb4a6c67f59bef8fed4303d', # CVXOPT-1.2.1-fix-setup-py.patch -] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SuiteSparse', '5.7.1', '-CUDA'), - ('GSL', '2.6'), -] - -download_dep_fail = True -use_pip = True - -preinstallopts = 'CVXOPT_BUILD_FFTW=1 CVXOPT_BUILD_GSL=1 CVXOPT_BLAS_EXTRA_LINK_ARGS="$LIBLAPACK" ' -preinstallopts += 'CVXOPT_FFTW_EXTRA_LINK_ARGS="$LIBFFT" CVXOPT_SUITESPARSE_SRC_DIR=$EBROOTSUITESPARSE' - -installopts = ' --no-binary cvxopt' - -sanity_check_commands = ['nosetests'] - -moduleclass = 'math' diff --git a/Golden_Repo/c/Cartopy/Cartopy-0.18.0-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/c/Cartopy/Cartopy-0.18.0-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index aeeee5746469ce1e28ac97d2d2d0a4536d986254..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Cartopy/Cartopy-0.18.0-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,49 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'Cartopy' -version = '0.18.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://scitools.org.uk/cartopy/docs/latest/' -description = """Cartopy is a Python package designed to make drawing maps for data analysis and visualisation easy.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -dependencies = [ - ('Python', '3.8.5'), - ('Fiona', '1.8.16', versionsuffix), - ('GDAL', '3.1.2', versionsuffix), - ('GEOS', '3.8.1', versionsuffix), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('pyproj', '2.6.1.post1', versionsuffix), - ('Shapely', '1.7.1', versionsuffix), - ('PROJ', '7.1.0'), -] - -use_pip = True -sanity_pip_check = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('OWSLib', '0.20.0', { - 'checksums': ['334988857b260c8cdf1f6698d07eab61839c51acb52ee10eed1275439200a40e'], - }), - ('pyepsg', '0.4.0', { - 'checksums': ['2d08fad1e7a8b47a90a4e43da485ba95705923425aefc4e2a3efa540dbd470d7'], - }), - ('pykdtree', '1.3.1', { - 'checksums': ['0d49d3bbfa0366dbe29176754ec86df75114a25525b530dcbbb75d3ac4c263e9'], - }), - ('pyshp', '2.1.0', { - 'modulename': 'shapefile', - 'checksums': ['e65c7f24d372b97d0920b864bbeb78322bb37b83f2606e2a2212631d5d51e5c0'], - }), - (name, version, { - 'checksums': ['7ffa317e8f8011e0d965a3ef1179e57a049f77019867ed677d49dcc5c0744434'], - }), -] - -moduleclass = 'geo' diff --git a/Golden_Repo/c/Cirq/Cirq-0.9.1-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/c/Cirq/Cirq-0.9.1-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index 2566a96dfd9234bbbe65c4e94f1c1fbc75cfc31d..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Cirq/Cirq-0.9.1-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,99 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'Cirq' -version = '0.9.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/quantumlib/cirq' -description = """A python framework for creating, editing, -and invoking Noisy Intermediate Scale Quantum (NISQ) circuits.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 
'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), - ('protobuf', '3.12.4'), - ('texlive', '20200406'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'use_pip_for_deps': False, - 'sanity_pip_check': True, -} - -exts_list = [ - ('cachetools', '4.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bbaa39c3dede00175df2dc2b03d0cf18dd2d32a7de7beb68072d13043c9edb20')]), - ])), - ('pyasn1-modules', '0.2.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e')]), - ])), - ('rsa', '4.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa')]), - ])), - ('google-auth', '1.23.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5176db85f1e7e837a646cd9cede72c3c404ccf2e3373d9ee14b2db88febad440')]), - ('modulename', 'google.auth'), - ])), - ('googleapis-common-protos', '1.52.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '560716c807117394da12cecb0a54da5a451b5cf9866f1d37e9a5e2329a665351')]), - ('modulename', 'googleapiclient') - ])), - ('grpcio', '1.33.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '21265511880056d19ce4f809ce3fbe2a3fa98ec1fc7167dbdf30a80d3276202e')]), - ('modulename', 'grpc') - ])), - ('google-api-core', '1.23.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1bb3c485c38eacded8d685b1759968f6cf47dd9432922d34edb90359eaa391e2')]), - ('modulename', 'google'), - ])), - ('httplib2', '0.18.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8af66c1c52c7ffe1aa5dc4bcd7c769885254b0756e6e69f953c7f0ab49a70ba3')]), - ])), - ('uritemplate', '3.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae')]), - ])), - ('google-auth-httplib2', '0.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8d092cc60fb16517b12057ec0bba9185a96e3b7169d86ae12eae98e645b7bc39')]), - ])), - ('google-api-python-client', '1.12.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1892cd490d164e5ec2f2168dc3b4fa0af68f36ca15a88b91bca1826b3d4f2829')]), - ('modulename', 'googleapiclient'), - ])), - ('typing_extensions', '3.7.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c')]), - ])), - ('sortedcontainers', '2.2.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4e73a757831fc3ca4de2859c422564239a31d8213d09a2a666e375807034d2ba')]), - ])), - ('networkx', '2.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602')]), - ])), - ('freezegun', '0.3.15', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e2062f2c7f95cc276a834c22f1a17179467176b624cc6f936e8bc3be5535ad1b')]), - ])), - ('protobuf', '3.12.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c99e5aea75b6f2b29c8d8da5bdc5f5ed8d9a5b4f15115c8316a3f0a850f94656')]), - ('modulename', 'google.protobuf') - ])), - ('cirq', version, dict(list(local_common_opts.items()) + [ - 
('source_tmpl', 'cirq-%(version)s-py3-none-any.whl'), - ('checksums', [('sha256', 'd900b861f2132a673b511b22ec80955cedec34c1bfa95d8f39cdc1eab5309242')]), - ('use_pip', True), - ('unpack_sources', False), - ])), -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/c/Cirq/Cirq-0.9.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/c/Cirq/Cirq-0.9.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index f8bfa5a865f9e90e564222fd8cfe871843b905c3..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Cirq/Cirq-0.9.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,99 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'Cirq' -version = '0.9.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/quantumlib/cirq' -description = """A python framework for creating, editing, -and invoking Noisy Intermediate Scale Quantum (NISQ) circuits.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), - ('protobuf', '3.12.4'), - ('texlive', '20200406'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'use_pip_for_deps': False, - 'sanity_pip_check': True, -} - -exts_list = [ - ('cachetools', '4.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bbaa39c3dede00175df2dc2b03d0cf18dd2d32a7de7beb68072d13043c9edb20')]), - ])), - ('pyasn1-modules', '0.2.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e')]), - ])), - ('rsa', '4.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa')]), - ])), - ('google-auth', '1.23.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5176db85f1e7e837a646cd9cede72c3c404ccf2e3373d9ee14b2db88febad440')]), - ('modulename', 'google.auth'), - ])), - ('googleapis-common-protos', '1.52.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '560716c807117394da12cecb0a54da5a451b5cf9866f1d37e9a5e2329a665351')]), - ('modulename', 'googleapiclient') - ])), - ('grpcio', '1.33.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '21265511880056d19ce4f809ce3fbe2a3fa98ec1fc7167dbdf30a80d3276202e')]), - ('modulename', 'grpc') - ])), - ('google-api-core', '1.23.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1bb3c485c38eacded8d685b1759968f6cf47dd9432922d34edb90359eaa391e2')]), - ('modulename', 'google'), - ])), - ('httplib2', '0.18.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8af66c1c52c7ffe1aa5dc4bcd7c769885254b0756e6e69f953c7f0ab49a70ba3')]), - ])), - ('uritemplate', '3.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae')]), - ])), - ('google-auth-httplib2', '0.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8d092cc60fb16517b12057ec0bba9185a96e3b7169d86ae12eae98e645b7bc39')]), - ])), - ('google-api-python-client', '1.12.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1892cd490d164e5ec2f2168dc3b4fa0af68f36ca15a88b91bca1826b3d4f2829')]), - ('modulename', 'googleapiclient'), - 
])), - ('typing_extensions', '3.7.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c')]), - ])), - ('sortedcontainers', '2.2.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4e73a757831fc3ca4de2859c422564239a31d8213d09a2a666e375807034d2ba')]), - ])), - ('networkx', '2.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602')]), - ])), - ('freezegun', '0.3.15', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e2062f2c7f95cc276a834c22f1a17179467176b624cc6f936e8bc3be5535ad1b')]), - ])), - ('protobuf', '3.12.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c99e5aea75b6f2b29c8d8da5bdc5f5ed8d9a5b4f15115c8316a3f0a850f94656')]), - ('modulename', 'google.protobuf') - ])), - ('cirq', version, dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'cirq-%(version)s-py3-none-any.whl'), - ('checksums', [('sha256', 'd900b861f2132a673b511b22ec80955cedec34c1bfa95d8f39cdc1eab5309242')]), - ('use_pip', True), - ('unpack_sources', False), - ])), -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/c/Clang/Clang-11.0.0-GCCcore-10.3.0.eb b/Golden_Repo/c/Clang/Clang-11.0.0-GCCcore-10.3.0.eb deleted file mode 100644 index 4e2846ed3d94a0c6824ff422948e9849c2ee1dd4..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Clang/Clang-11.0.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,97 +0,0 @@ -# For using $SYSTEMNAME to determine compute capability. The local prefix is to appease the checker -import os as local_os - -name = 'Clang' -version = '11.0.0' - -homepage = 'https://clang.llvm.org/' -description = """C, C++, Objective-C compiler, based on LLVM. Does not - include C++ standard library -- use libstdc++ from GCC.""" -site_contacts = 'sc@fz-juelich.de' - -# Clang also depends on libstdc++ during runtime, but this dependency is -# already specified as the toolchain. 
-toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - "https://github.com/llvm/llvm-project/releases/download/llvmorg-%(version)s" -] -sources = [ - 'llvm-%(version)s.src.tar.xz', - 'clang-%(version)s.src.tar.xz', - 'compiler-rt-%(version)s.src.tar.xz', - 'polly-%(version)s.src.tar.xz', - 'openmp-%(version)s.src.tar.xz', - # Also include the LLVM linker - 'lld-%(version)s.src.tar.xz', - 'libcxx-%(version)s.src.tar.xz', - 'libcxxabi-%(version)s.src.tar.xz', - 'clang-tools-extra-%(version)s.src.tar.xz', -] -checksums = [ - # llvm-11.0.0.src.tar.xz - '913f68c898dfb4a03b397c5e11c6a2f39d0f22ed7665c9cefa87a34423a72469', - # clang-11.0.0.src.tar.xz - '0f96acace1e8326b39f220ba19e055ba99b0ab21c2475042dbc6a482649c5209', - # compiler-rt-11.0.0.src.tar.xz - '374aff82ff573a449f9aabbd330a5d0a441181c535a3599996127378112db234', - # polly-11.0.0.src.tar.xz - 'dcfadb8d11f2ea0743a3f19bab3b43ee1cb855e136bc81c76e2353cd76148440', - # openmp-11.0.0.src.tar.xz - '2d704df8ca67b77d6d94ebf79621b0f773d5648963dd19e0f78efef4404b684c', - # lld-11.0.0.src.tar.xz - 'efe7be4a7b7cdc6f3bcf222827c6f837439e6e656d12d6c885d5c8a80ff4fd1c', - # libcxx-11.0.0.src.tar.xz - '6c1ee6690122f2711a77bc19241834a9219dda5036e1597bfa397f341a9b8b7a', - # libcxxabi-11.0.0.src.tar.xz - '58697d4427b7a854ec7529337477eb4fba16407222390ad81a40d125673e4c15', - # clang-tools-extra-11.0.0.src.tar.xz - 'fed318f75d560d0e0ae728e2fb8abce71e9d0c60dd120c9baac118522ce76c09', -] - -dependencies = [ - # since Clang is a compiler, binutils is a runtime dependency too - ('binutils', '2.36.1'), - ('hwloc', '2.4.1'), - ('libxml2', '2.9.10'), - ('ncurses', '6.2'), - ('GMP', '6.2.0'), - ('Z3', '4.8.9'), -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Python', '3.8.5'), -] - -default_cuda_capability = { - 'juwels': '70', - 'juwelsbooster': '80', - 'jurecadc': '80', - 'jusuf': '70', - 'hdfml': '70' -}[local_os.environ['SYSTEMNAME']] - -cuda_compute_capabilities = { - 'juwels': ['7.0'], - 'juwelsbooster': ['8.0'], - 'jurecadc': ['8.0'], - 'jusuf': ['7.0'], - 'hdfml': ['7.0'] -}[local_os.environ['SYSTEMNAME']] - -assertions = True -usepolly = True -build_lld = True -libcxx = True -enable_rtti = True -build_extra_clang_tools = True - -skip_all_tests = True - -modluafooter = ''' - add_property("arch","gpu") -''' - -# it should be a compiler, but then we won't be able to load GCC+MPI -moduleclass = 'devel' diff --git a/Golden_Repo/c/Clang/Clang-11.0.0-GCCcore-9.3.0.eb b/Golden_Repo/c/Clang/Clang-11.0.0-GCCcore-9.3.0.eb deleted file mode 100644 index e8e927d8eb8c5c98e9b59dec494a71bc6a436355..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Clang/Clang-11.0.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,97 +0,0 @@ -# For using $SYSTEMNAME to determine compute capability. The local prefix is to appease the checker -import os as local_os - -name = 'Clang' -version = '11.0.0' - -homepage = 'https://clang.llvm.org/' -description = """C, C++, Objective-C compiler, based on LLVM. Does not - include C++ standard library -- use libstdc++ from GCC.""" -site_contacts = 'sc@fz-juelich.de' - -# Clang also depends on libstdc++ during runtime, but this dependency is -# already specified as the toolchain. 
-toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [ - "https://github.com/llvm/llvm-project/releases/download/llvmorg-%(version)s" -] -sources = [ - 'llvm-%(version)s.src.tar.xz', - 'clang-%(version)s.src.tar.xz', - 'compiler-rt-%(version)s.src.tar.xz', - 'polly-%(version)s.src.tar.xz', - 'openmp-%(version)s.src.tar.xz', - # Also include the LLVM linker - 'lld-%(version)s.src.tar.xz', - 'libcxx-%(version)s.src.tar.xz', - 'libcxxabi-%(version)s.src.tar.xz', - 'clang-tools-extra-%(version)s.src.tar.xz', -] -checksums = [ - # llvm-11.0.0.src.tar.xz - '913f68c898dfb4a03b397c5e11c6a2f39d0f22ed7665c9cefa87a34423a72469', - # clang-11.0.0.src.tar.xz - '0f96acace1e8326b39f220ba19e055ba99b0ab21c2475042dbc6a482649c5209', - # compiler-rt-11.0.0.src.tar.xz - '374aff82ff573a449f9aabbd330a5d0a441181c535a3599996127378112db234', - # polly-11.0.0.src.tar.xz - 'dcfadb8d11f2ea0743a3f19bab3b43ee1cb855e136bc81c76e2353cd76148440', - # openmp-11.0.0.src.tar.xz - '2d704df8ca67b77d6d94ebf79621b0f773d5648963dd19e0f78efef4404b684c', - # lld-11.0.0.src.tar.xz - 'efe7be4a7b7cdc6f3bcf222827c6f837439e6e656d12d6c885d5c8a80ff4fd1c', - # libcxx-11.0.0.src.tar.xz - '6c1ee6690122f2711a77bc19241834a9219dda5036e1597bfa397f341a9b8b7a', - # libcxxabi-11.0.0.src.tar.xz - '58697d4427b7a854ec7529337477eb4fba16407222390ad81a40d125673e4c15', - # clang-tools-extra-11.0.0.src.tar.xz - 'fed318f75d560d0e0ae728e2fb8abce71e9d0c60dd120c9baac118522ce76c09', -] - -dependencies = [ - # since Clang is a compiler, binutils is a runtime dependency too - ('binutils', '2.34'), - ('hwloc', '2.2.0'), - ('libxml2', '2.9.10'), - ('ncurses', '6.2'), - ('GMP', '6.2.0'), - ('Z3', '4.8.9'), -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Python', '3.8.5'), -] - -default_cuda_capability = { - 'juwels': '70', - 'juwelsbooster': '80', - 'jurecadc': '80', - 'jusuf': '70', - 'hdfml': '70' -}[local_os.environ['SYSTEMNAME']] - -cuda_compute_capabilities = { - 'juwels': ['7.0'], - 'juwelsbooster': ['8.0'], - 'jurecadc': ['8.0'], - 'jusuf': ['7.0'], - 'hdfml': ['7.0'] -}[local_os.environ['SYSTEMNAME']] - -assertions = True -usepolly = True -build_lld = True -libcxx = True -enable_rtti = True -build_extra_clang_tools = True - -skip_all_tests = True - -modluafooter = ''' - add_property("arch","gpu") -''' - -# it should be a compiler, but then we won't be able to load GCC+MPI -moduleclass = 'devel' diff --git a/Golden_Repo/c/Cling/Cling-0.7-GCCcore-9.3.0.eb b/Golden_Repo/c/Cling/Cling-0.7-GCCcore-9.3.0.eb deleted file mode 100644 index 89f2b67c680e580ba647a71064b19b4d0c942b56..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Cling/Cling-0.7-GCCcore-9.3.0.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Cling' -version = '0.7' - -homepage = "https://cdn.rawgit.com/root-project/cling/master/www/index.html" -description = """Cling is an interactive C++ interpreter, built on the top of LLVM and Clang libraries. -Its advantages over the standard interpreters are that it has command line prompt -and uses just-in-time (JIT) compiler for compilation. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/vgvassilev/cling/archive/'] -sources = ['v%(version)s.tar.gz'] - -patches = [('patchpatch4llvmtools.patch', 1)] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34',), -] - -separate_build_dir = True -srcdir = "%(builddir)s/cling-%(version)s/src" - -preconfigopts = "mkdir -p %(builddir)s/cling-%(version)s && " - -# get source -preconfigopts += "cd %(builddir)s/cling-%(version)s && " -preconfigopts += "git clone http://root.cern.ch/git/llvm.git src && " -preconfigopts += "cd src && " -preconfigopts += "git checkout cling-patches && " -preconfigopts += "cd tools && " -preconfigopts += "git clone http://root.cern.ch/git/cling.git && " -preconfigopts += "git clone http://root.cern.ch/git/clang.git && " -preconfigopts += "cd clang && " -preconfigopts += "git checkout cling-patches && " -preconfigopts += "cd ../.. && " - -# patch source (or turn off llvm tools) -# configopts = ['-DLLVM_BUILD_TOOLS=Off'] -preconfigopts += "cd %(builddir)s/cling-%(version)s && " -preconfigopts += "patch -s -p0 < patch4llvmtools.patch && " - -# cd to easybuild standard build directory -preconfigopts += "cd %(builddir)s/easybuild_obj/ && " - -sanity_check_paths = { - 'files': ['bin/cling'], - 'dirs': ['bin', 'include', 'lib', 'share'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/c/Cling/Cling-0.9-GCCcore-10.3.0.eb b/Golden_Repo/c/Cling/Cling-0.9-GCCcore-10.3.0.eb deleted file mode 100644 index 98e46cc1036c49abc4598e66cf35a858ead8b240..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Cling/Cling-0.9-GCCcore-10.3.0.eb +++ /dev/null @@ -1,69 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Cling' -version = '0.9' - -homepage = "https://cdn.rawgit.com/root-project/cling/master/www/index.html" -description = """Cling is an interactive C++ interpreter, built on the top of LLVM and Clang libraries. -Its advantages over the standard interpreters are that it has command line prompt -and uses just-in-time (JIT) compiler for compilation. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/root-project/cling/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['5fe545b4ca2884dc861e1241f2ae7b975b60514062675995cfbc401e3b3e8258'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('binutils', '2.36.1',), -] - -separate_build_dir = True -srcdir = "%(builddir)s/src" - -# get source -preconfigopts = ( - - # get compatible LLVM version id - 'pushd %(builddir)s/cling-%(version)s && ' - 'LLVM_RELEASE=$(cat LastKnownGoodLLVMSVNRevision.txt) && ' - - # clone compatible LLVM branch - 'cd .. && ' - 'git clone http://root.cern.ch/git/llvm.git src && ' - 'cd src && ' - 'git checkout cling-patches-r${LLVM_RELEASE} && ' - - # clone compatible Clang branch to the correct position in LLVM src - 'cd tools && ' - 'git clone http://root.cern.ch/git/clang.git && ' - 'cd clang && ' - 'git checkout cling-patches-r${LLVM_RELEASE} && ' - - # add cling src to the correct position LLVM src - 'cd .. 
&& ' - 'ln -s ../../cling-%(version)s cling && ' - - # cd to easybuild standard build directory - 'popd && ' -) - -# copy jupyter kernel files -postinstallcmds = [ - # copy Jupyter kernel install files - # https://cdn.rawgit.com/root-project/cling/master/www/jupyter.html - 'mkdir -p %(installdir)s/share/cling/ ', - 'cp -a %(builddir)s/cling-%(version)s/tools/Jupyter %(installdir)s/share/cling ', -] - -sanity_check_paths = { - 'files': ['bin/cling'], - 'dirs': ['bin', 'include', 'lib', 'share'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/c/Cling/Cling-0.9-GCCcore-9.3.0.eb b/Golden_Repo/c/Cling/Cling-0.9-GCCcore-9.3.0.eb deleted file mode 100644 index 5141c890b1d7289609eea472f3ca1449b0341caa..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Cling/Cling-0.9-GCCcore-9.3.0.eb +++ /dev/null @@ -1,69 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Cling' -version = '0.9' - -homepage = "https://cdn.rawgit.com/root-project/cling/master/www/index.html" -description = """Cling is an interactive C++ interpreter, built on the top of LLVM and Clang libraries. -Its advantages over the standard interpreters are that it has command line prompt -and uses just-in-time (JIT) compiler for compilation. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/root-project/cling/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['5fe545b4ca2884dc861e1241f2ae7b975b60514062675995cfbc401e3b3e8258'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34',), -] - -separate_build_dir = True -srcdir = "%(builddir)s/src" - -# get source -preconfigopts = ( - - # get compatible LLVM version id - 'pushd %(builddir)s/cling-%(version)s && ' - 'LLVM_RELEASE=$(cat LastKnownGoodLLVMSVNRevision.txt) && ' - - # clone compatible LLVM branch - 'cd .. && ' - 'git clone http://root.cern.ch/git/llvm.git src && ' - 'cd src && ' - 'git checkout cling-patches-r${LLVM_RELEASE} && ' - - # clone compatible Clang branch to the correct position in LLVM src - 'cd tools && ' - 'git clone http://root.cern.ch/git/clang.git && ' - 'cd clang && ' - 'git checkout cling-patches-r${LLVM_RELEASE} && ' - - # add cling src to the correct position LLVM src - 'cd .. 
&& ' - 'ln -s ../../cling-%(version)s cling && ' - - # cd to easybuild standard build directory - 'popd && ' -) - -# copy jupyter kernel files -postinstallcmds = [ - # copy Jupyter kernel install files - # https://cdn.rawgit.com/root-project/cling/master/www/jupyter.html - 'mkdir -p %(installdir)s/share/cling/ ', - 'cp -a %(builddir)s/cling-%(version)s/tools/Jupyter %(installdir)s/share/cling ', -] - -sanity_check_paths = { - 'files': ['bin/cling'], - 'dirs': ['bin', 'include', 'lib', 'share'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/c/Cling/patchpatch4llvmtools.patch b/Golden_Repo/c/Cling/patchpatch4llvmtools.patch deleted file mode 100644 index 4e0755341d0b42ed6d7ebf9af7a516180f177108..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Cling/patchpatch4llvmtools.patch +++ /dev/null @@ -1,15 +0,0 @@ -diff -Naur cling-0.7.orig/patch4llvmtools.patch cling-0.7/patch4llvmtools.patch ---- cling-0.7.orig/patch4llvmtools.patch 1970-01-01 01:00:00.000000000 +0100 -+++ cling-0.7/patch4llvmtools.patch 2020-12-10 19:28:27.244406042 +0100 -@@ -0,0 +1,11 @@ -+--- src.orig/include/llvm/ExecutionEngine/Orc/OrcRemoteTargetClient.h 2020-12-10 18:50:25.357525801 +0100 -++++ src/include/llvm/ExecutionEngine/Orc/OrcRemoteTargetClient.h 2020-12-10 18:53:47.450543728 +0100 -+@@ -713,7 +713,7 @@ -+ -+ uint32_t getTrampolineSize() const { return RemoteTrampolineSize; } -+ -+- Expected<std::vector<char>> readMem(char *Dst, JITTargetAddress Src, -++ Expected<std::vector<uint8_t>> readMem(char *Dst, JITTargetAddress Src, -+ uint64_t Size) { -+ // Check for an 'out-of-band' error, e.g. from an MM destructor. -+ if (ExistingError) diff --git a/Golden_Repo/c/Code_Saturne/Code_Saturne-6.1.1-gpsmkl-2020.eb b/Golden_Repo/c/Code_Saturne/Code_Saturne-6.1.1-gpsmkl-2020.eb deleted file mode 100644 index caedcfdbfc8b7f5dae5dca12fc6bf1a996cc1aa7..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Code_Saturne/Code_Saturne-6.1.1-gpsmkl-2020.eb +++ /dev/null @@ -1,54 +0,0 @@ -# easyconfig file for Code_Saturne -# author: Metin Cakircali (Juelich Supercomputing Centre) -# maintainer since 2020: Seong-Ryong Koh, Alex Strube at Juelich Supercomputing Centre (JSC) -name = 'Code_Saturne' -version = '6.1.1' - -# extra option for the SLURM batch system -slurm = True - -homepage = 'https://www.code-saturne.org' -description = """Code_Saturne solves the Navier-Stokes equations -for 2D, 2D-axisymmetric and 3D flows, steady or unsteady, -laminar or turbulent, incompressible or weakly dilatable, -isothermal or not, with scalars transport if required. 
- -Code_Saturne %(version)s%(versionsuffix)s is installed in -$EBROOTCODE_SATURNE -""" -site_contacts = 's.koh@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -sources = [{ - 'source_urls': ['https://github.com/code-saturne/code_saturne/archive/'], - 'filename': 'v%(version)s.tar.gz' -}] - -builddependencies = [ - ('Autotools', '20200321'), - ('gettext', '0.20.2'), - ('Bison', '3.6.4'), - ('flex', '2.6.4'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PyQt5', '5.15.1', '-Python-%(pyver)s'), - ('HDF5', '1.10.6'), - ('MED', '4.0.0'), - ('CGNS', '4.1.1'), - ('ParMETIS', '4.0.3'), - ('SCOTCH', '6.1.0'), - ('ParaView', '5.8.1', '-Python-%(pyver)s', ('gpsmkl', '2020')) -] - -# better to configure these dependents explicitly -configopts = ['--with-med=$EBROOTMED --with-cgns=$EBROOTCGNS --with-metis=$EBROOTPARMETIS --with-scotch=$EBROOTSCOTCH'] - -modloadmsg = "To benefit from shell completion for %(name)s commands and\n" -modloadmsg += "options, you may also source a bash completion file by;\n" -modloadmsg += ". $CS_BASH\n" - -moduleclass = 'cae' diff --git a/Golden_Repo/c/Coreutils/Coreutils-8.32-GCCcore-10.3.0.eb b/Golden_Repo/c/Coreutils/Coreutils-8.32-GCCcore-10.3.0.eb deleted file mode 100644 index c182cf2a1a566dc594eca34b06f389e522c008c5..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Coreutils/Coreutils-8.32-GCCcore-10.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = "Coreutils" -version = "8.32" - -homepage = 'http://www.gnu.org/software/coreutils/' -description = """The GNU Core Utilities are the basic file, shell and text -manipulation utilities of the GNU operating system. These are -the core utilities which are expected to exist on every -operating system. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [('binutils', '2.36.1')] - -sanity_check_paths = { - 'files': ['bin/sort', 'bin/echo', 'bin/du', 'bin/date', 'bin/true'], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/c/Coreutils/Coreutils-8.32-GCCcore-9.3.0.eb b/Golden_Repo/c/Coreutils/Coreutils-8.32-GCCcore-9.3.0.eb deleted file mode 100644 index a446d064a813256f760235aa85cf66209e32fa27..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/Coreutils/Coreutils-8.32-GCCcore-9.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = "Coreutils" -version = "8.32" - -homepage = 'http://www.gnu.org/software/coreutils/' -description = """The GNU Core Utilities are the basic file, shell and text -manipulation utilities of the GNU operating system. These are -the core utilities which are expected to exist on every -operating system. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [('binutils', '2.34')] - -sanity_check_paths = { - 'files': ['bin/sort', 'bin/echo', 'bin/du', 'bin/date', 'bin/true'], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/c/CubeGUI/CubeGUI-4.5-GCCcore-9.3.0.eb b/Golden_Repo/c/CubeGUI/CubeGUI-4.5-GCCcore-9.3.0.eb deleted file mode 100644 index 3d07eb8fb028e35509f198bc640d93b75016e763..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeGUI/CubeGUI-4.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,53 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeGUI' -version = '4.5' - -homepage = 'http://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube graphical report explorer. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/cube/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - ('CubeLib', '4.5'), -] - -dependencies = [ - ('Qt5', '5.14.2'), -] - -sanity_check_paths = { - 'files': ['bin/cube', 'bin/cubegui-config', - ('lib/libcube4gui.a', 'lib64/libcube4gui.a'), - ('lib/libcube4gui.%s' % SHLIB_EXT, 'lib64/libcube4gui.%s' % SHLIB_EXT)], - 'dirs': ['include/cubegui', 'lib/cube-plugins'], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeGUI/CubeGUI-4.6-GCCcore-10.3.0.eb b/Golden_Repo/c/CubeGUI/CubeGUI-4.6-GCCcore-10.3.0.eb deleted file mode 100644 index 119739e137a2a940127f4b339227aca07f58cf9b..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeGUI/CubeGUI-4.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,64 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2021 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeGUI' -version = '4.6' - -homepage = 'https://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. 
Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube graphical report explorer. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/cubegui/tags/cubegui-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '1871c6736121d94a22314cb5daa8f3cbb978b58bfe54f677c4c9c9693757d0c5', # cubegui-4.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.36.1'), - ('CubeLib', '4.6'), -] - -dependencies = [ - ('Qt5', '5.14.2'), -] - -sanity_check_paths = { - 'files': ['bin/cube', 'bin/cubegui-config', - ('lib/libcube4gui.a', 'lib64/libcube4gui.a'), - ('lib/libcube4gui.%s' % SHLIB_EXT, 'lib64/libcube4gui.%s' % SHLIB_EXT)], - 'dirs': ['include/cubegui', 'lib/cube-plugins'], -} - -# CubeGUI (and other Qt apps that use OpenGl) crash from time to time -# or don't show any output when using Qt's WebEngine with the default -# KNOB_MAX_WORKER_THREADS value of 65535. Even with a value of 10 this -# behavior doesn't vanish. Thus, don't use WebEngine at all, although -# it makes nicer output. -configopts = '--without-web-engine' - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeGUI/CubeGUI-4.6-GCCcore-9.3.0.eb b/Golden_Repo/c/CubeGUI/CubeGUI-4.6-GCCcore-9.3.0.eb deleted file mode 100644 index 9e8f37fbdce7f0b25e1a4a3c8512aa9099aa61ee..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeGUI/CubeGUI-4.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,65 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2021 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeGUI' -version = '4.6' - -homepage = 'https://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube graphical report explorer. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/cubegui/tags/cubegui-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '1871c6736121d94a22314cb5daa8f3cbb978b58bfe54f677c4c9c9693757d0c5', # cubegui-4.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - ('CubeLib', '4.6'), -] - -dependencies = [ - ('Qt5', '5.14.2'), -] - -sanity_check_paths = { - 'files': ['bin/cube', 'bin/cubegui-config', - ('lib/libcube4gui.a', 'lib64/libcube4gui.a'), - ('lib/libcube4gui.%s' % SHLIB_EXT, 'lib64/libcube4gui.%s' % SHLIB_EXT)], - 'dirs': ['include/cubegui', 'lib/cube-plugins'], -} - -# CubeGUI (and other Qt apps that use OpenGl) crash from time to time -# or don't show any output when using Qt's WebEngine with the default -# KNOB_MAX_WORKER_THREADS value of 65535 (from -# OpenGL-2020-GCCcore-9.3.0.eb). Even with a value of 10 this behavior -# doesn't vanish. Thus, don't use WebEngine at all, although it makes -# nicer output. -configopts = '--without-web-engine' - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeLib/CubeLib-4.5-GCCcore-9.3.0.eb b/Golden_Repo/c/CubeLib/CubeLib-4.5-GCCcore-9.3.0.eb deleted file mode 100644 index 6fd28ca82f5fdbdccbe7f0d5c83c266444026a85..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeLib/CubeLib-4.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,57 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2020 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeLib' -version = '4.5' - -homepage = 'http://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube general purpose C++ library component and -command-line tools. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/cube/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/cubelib-config', - ('lib/libcube4.a', 'lib64/libcube4.a'), - ('lib/libcube4.%s' % SHLIB_EXT, 'lib64/libcube4.%s' % SHLIB_EXT)], - 'dirs': ['include/cubelib'], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeLib/CubeLib-4.6-GCCcore-10.3.0.eb b/Golden_Repo/c/CubeLib/CubeLib-4.6-GCCcore-10.3.0.eb deleted file mode 100644 index b60e27af468f6d4466b8146e5f0ef746466ff6f0..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeLib/CubeLib-4.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,60 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2021 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeLib' -version = '4.6' - -homepage = 'https://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube general purpose C++ library component and -command-line tools. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/cubelib/tags/cubelib-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '36eaffa7688db8b9304c9e48ca5dc4edc2cb66538aaf48657b9b5ccd7979385b', # cubelib-4.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/cubelib-config', - ('lib/libcube4.a', 'lib64/libcube4.a'), - ('lib/libcube4.%s' % SHLIB_EXT, 'lib64/libcube4.%s' % SHLIB_EXT)], - 'dirs': ['include/cubelib'], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeLib/CubeLib-4.6-GCCcore-9.3.0.eb b/Golden_Repo/c/CubeLib/CubeLib-4.6-GCCcore-9.3.0.eb deleted file mode 100644 index a6abfbcdd54a351410d94af32395e799fa0dc298..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeLib/CubeLib-4.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,60 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2021 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeLib' -version = '4.6' - -homepage = 'https://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube general purpose C++ library component and -command-line tools. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/cubelib/tags/cubelib-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '36eaffa7688db8b9304c9e48ca5dc4edc2cb66538aaf48657b9b5ccd7979385b', # cubelib-4.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/cubelib-config', - ('lib/libcube4.a', 'lib64/libcube4.a'), - ('lib/libcube4.%s' % SHLIB_EXT, 'lib64/libcube4.%s' % SHLIB_EXT)], - 'dirs': ['include/cubelib'], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeWriter/CubeWriter-4.5-GCCcore-9.3.0.eb b/Golden_Repo/c/CubeWriter/CubeWriter-4.5-GCCcore-9.3.0.eb deleted file mode 100644 index 6327aa905f54f8e104b33bf08762b419b4506621..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeWriter/CubeWriter-4.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,56 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2020 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeWriter' -version = '4.5' - -homepage = 'http://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube high-performance C writer library component. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/cube/%(version_major_minor)s/dist'] -sources = ['cubew-%(version)s.tar.gz'] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/cubew-config', - ('lib/libcube4w.a', 'lib64/libcube4w.a'), - ('lib/libcube4w.%s' % SHLIB_EXT, 'lib64/libcube4w.%s' % SHLIB_EXT)], - 'dirs': ['include/cubew'], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeWriter/CubeWriter-4.6-GCCcore-10.3.0.eb b/Golden_Repo/c/CubeWriter/CubeWriter-4.6-GCCcore-10.3.0.eb deleted file mode 100644 index 39e0bd10ce887ace4e794dfa425b879e7e280efc..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeWriter/CubeWriter-4.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,59 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2021 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeWriter' -version = '4.6' - -homepage = 'https://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube high-performance C writer library component. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/cubew/tags/cubew-%(version)s'] -sources = ['cubew-%(version)s.tar.gz'] -checksums = [ - '99fe58ce7ab13061ebfbc360aedaecc28099a30636c5269a42c0cbaf57149aa8', # cubew-4.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/cubew-config', - ('lib/libcube4w.a', 'lib64/libcube4w.a'), - ('lib/libcube4w.%s' % SHLIB_EXT, 'lib64/libcube4w.%s' % SHLIB_EXT)], - 'dirs': ['include/cubew'], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/c/CubeWriter/CubeWriter-4.6-GCCcore-9.3.0.eb b/Golden_Repo/c/CubeWriter/CubeWriter-4.6-GCCcore-9.3.0.eb deleted file mode 100644 index 70965035c7752a233672c0c90874a9d6440fe894..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/CubeWriter/CubeWriter-4.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,59 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2018-2021 Juelich Supercomputing Centre, Germany -# Authors:: Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'CubeWriter' -version = '4.6' - -homepage = 'https://www.scalasca.org/software/cube-4.x/download.html' -description = """ -Cube, which is used as performance report explorer for Scalasca and Score-P, -is a generic tool for displaying a multi-dimensional performance space -consisting of the dimensions (i) performance metric, (ii) call path, and -(iii) system resource. Each dimension can be represented as a tree, where -non-leaf nodes of the tree can be collapsed or expanded to achieve the -desired level of granularity. - -This module provides the Cube high-performance C writer library component. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/cubew/tags/cubew-%(version)s'] -sources = ['cubew-%(version)s.tar.gz'] -checksums = [ - '99fe58ce7ab13061ebfbc360aedaecc28099a30636c5269a42c0cbaf57149aa8', # cubew-4.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/cubew-config', - ('lib/libcube4w.a', 'lib64/libcube4w.a'), - ('lib/libcube4w.%s' % SHLIB_EXT, 'lib64/libcube4w.%s' % SHLIB_EXT)], - 'dirs': ['include/cubew'], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/c/cURL/cURL-7.71.1-GCCcore-10.3.0.eb b/Golden_Repo/c/cURL/cURL-7.71.1-GCCcore-10.3.0.eb deleted file mode 100644 index 80347ebdcb93517361162133022b5e19c28c8e67..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cURL/cURL-7.71.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'cURL' -version = '7.71.1' - -homepage = 'http://curl.haxx.se' -description = """libcurl is a free and easy-to-use client-side URL transfer - library, supporting DICT, FILE, FTP, FTPS, Gopher, HTTP, HTTPS, IMAP, IMAPS, - LDAP, LDAPS, POP3, POP3S, RTMP, RTSP, SCP, SFTP, SMTP, SMTPS, Telnet and TFTP. - libcurl supports SSL certificates, HTTP POST, HTTP PUT, FTP uploading, HTTP - form based upload, proxies, cookies, user+password authentication (Basic, - Digest, NTLM, Negotiate, Kerberos), file transfer resume, http proxy tunneling - and more. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://%(namelower)s.haxx.se/download/'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -modextravars = {'CURL_INCLUDES': '%(installdir)s/include'} - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'lib/libcurl.a', 'lib/libcurl.so'], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/c/cURL/cURL-7.71.1-GCCcore-9.3.0.eb b/Golden_Repo/c/cURL/cURL-7.71.1-GCCcore-9.3.0.eb deleted file mode 100644 index a6ff6b214ede53636fa916b586d196f5292d6ace..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cURL/cURL-7.71.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'cURL' -version = '7.71.1' - -homepage = 'http://curl.haxx.se' -description = """libcurl is a free and easy-to-use client-side URL transfer - library, supporting DICT, FILE, FTP, FTPS, Gopher, HTTP, HTTPS, IMAP, IMAPS, - LDAP, LDAPS, POP3, POP3S, RTMP, RTSP, SCP, SFTP, SMTP, SMTPS, Telnet and TFTP. - libcurl supports SSL certificates, HTTP POST, HTTP PUT, FTP uploading, HTTP - form based upload, proxies, cookies, user+password authentication (Basic, - Digest, NTLM, Negotiate, Kerberos), file transfer resume, http proxy tunneling - and more. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://%(namelower)s.haxx.se/download/'] - -builddependencies = [ - ('binutils', '2.34'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -modextravars = {'CURL_INCLUDES': '%(installdir)s/include'} - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'lib/libcurl.a', 'lib/libcurl.so'], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/c/cairo/cairo-1.17.2-GCCcore-10.3.0.eb b/Golden_Repo/c/cairo/cairo-1.17.2-GCCcore-10.3.0.eb deleted file mode 100644 index 2bfdf3a482733f33a1157fccc1ca91b4e167d58e..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cairo/cairo-1.17.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'cairo' -version = '1.17.2' - -homepage = 'http://cairographics.org' -description = """Cairo is a 2D graphics library with support for multiple output devices. - Currently supported output targets include the X Window System (via both Xlib and XCB), Quartz, Win32, image buffers, - PostScript, PDF, and SVG file output. Experimental backends include OpenGL, BeOS, OS/2, and DirectFB -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://cairographics.org/snapshots/'] -sources = [SOURCE_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('X11', '20200222'), - ('freetype', '2.10.1'), - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('expat', '2.2.9'), - ('GLib', '2.64.4'), - ('pixman', '0.40.0'), -] - -# disable symbol lookup, which requires -lbfd, to avoid link issues with (non-PIC) libiberty.a provided by GCC -configopts = "--enable-symbol-lookup=no --enable-gobject=yes --enable-svg=yes --enable-tee=yes " - -# workaround for "hidden symbol .* in .* is referenced by DSO" and "ld: final link failed: Bad value" -buildopts = 'LD="$CC"' - -sanity_check_paths = { - 'files': ['bin/cairo-trace', 'lib/cairo/libcairo-trace.so', 'lib/cairo/libcairo-trace.a', - 'lib/libcairo.a', 'lib/libcairo-gobject.a', 'lib/libcairo-script-interpreter.a', - 'lib/libcairo-gobject.so', 'lib/libcairo-script-interpreter.so', 'lib/libcairo.so'] + - ['include/cairo/cairo%s.h' % x for x in ['', '-deprecated', '-features', '-ft', '-gobject', '-pdf', '-ps', - '-script', '-script-interpreter', '-svg', '-version', '-xcb', - '-xlib', '-xlib-xrender']], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/c/cairo/cairo-1.17.2-GCCcore-9.3.0.eb b/Golden_Repo/c/cairo/cairo-1.17.2-GCCcore-9.3.0.eb deleted file mode 100644 index b38ccb8ce9381834c4bbf94618a24a7ee79e35e6..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cairo/cairo-1.17.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'cairo' -version = '1.17.2' - -homepage = 'http://cairographics.org' -description = """Cairo is a 2D graphics library with support for multiple output devices. - Currently supported output targets include the X Window System (via both Xlib and XCB), Quartz, Win32, image buffers, - PostScript, PDF, and SVG file output. 
Experimental backends include OpenGL, BeOS, OS/2, and DirectFB -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://cairographics.org/snapshots/'] -sources = [SOURCE_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('X11', '20200222'), - ('freetype', '2.10.1'), - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('expat', '2.2.9'), - ('GLib', '2.64.4'), - ('pixman', '0.40.0'), -] - -# disable symbol lookup, which requires -lbfd, to avoid link issues with (non-PIC) libiberty.a provided by GCC -configopts = "--enable-symbol-lookup=no --enable-gobject=yes --enable-svg=yes --enable-tee=yes " - -# workaround for "hidden symbol .* in .* is referenced by DSO" and "ld: final link failed: Bad value" -buildopts = 'LD="$CC"' - -sanity_check_paths = { - 'files': ['bin/cairo-trace', 'lib/cairo/libcairo-trace.so', 'lib/cairo/libcairo-trace.a', - 'lib/libcairo.a', 'lib/libcairo-gobject.a', 'lib/libcairo-script-interpreter.a', - 'lib/libcairo-gobject.so', 'lib/libcairo-script-interpreter.so', 'lib/libcairo.so'] + - ['include/cairo/cairo%s.h' % x for x in ['', '-deprecated', '-features', '-ft', '-gobject', '-pdf', '-ps', - '-script', '-script-interpreter', '-svg', '-version', '-xcb', - '-xlib', '-xlib-xrender']], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/c/ccache/ccache-4.3.eb b/Golden_Repo/c/ccache/ccache-4.3.eb deleted file mode 100644 index c8cc6b62f41ed3a32aa96c8b724b370511256551..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/ccache/ccache-4.3.eb +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL - -easyblock = 'CMakeNinja' - -name = 'ccache' -version = '4.3' - -homepage = 'https://ccache.dev/' -description = """Ccache (or “ccache”) is a compiler cache. 
It speeds up recompilation by -caching previous compilations and detecting when the same compilation is being done again""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ccache/ccache/releases/download/v%(version)s/'] -sources = [SOURCE_TAR_GZ] -checksums = ['b9789c42e52c73e99428f311a34def9ffec3462736439afd12dbacc7987c1533'] - -osdependencies = [('glibc-static', 'libc6-dev')] - -local_gccver = '10.3.0' -builddependencies = [ - ('GCC', local_gccver), - ('CMake', '3.18.0', '', SYSTEM), - ('Ninja', '1.10.0', '', ('GCCcore', local_gccver)), - ('zstd', '1.4.9', '', ('GCCcore', local_gccver)), -] - -# use BFD linker rather than default ld.gold (required on CentOS 8) -preconfigopts = 'LDFLAGS="-static -fuse-ld=bfd"' -configopts = '-DENABLE_DOCUMENTATION=OFF -DENABLE_IPO=ON -DZSTD_LIBRARY="$EBROOTZSTD/lib/libzstd.a" ' -# disable hunt for faster linker, since using ld.gold may fail (on CentOS 8, for example) -configopts += '-DUSE_FASTER_LINKER=OFF' - -sanity_check_paths = { - 'files': ['bin/ccache'], - 'dirs': [] -} -sanity_check_commands = ['ccache --help'] - -moduleclass = 'tools' diff --git a/Golden_Repo/c/cppcheck/cppcheck-2.2-GCCcore-10.3.0.eb b/Golden_Repo/c/cppcheck/cppcheck-2.2-GCCcore-10.3.0.eb deleted file mode 100644 index 498d980dfdb76afa15b018741486a3da37134825..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cppcheck/cppcheck-2.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'CMakeMake' -name = 'cppcheck' -version = '2.2' - -homepage = 'http://cppcheck.sourceforge.net/' -description = """Cppcheck is a static analysis tool for C/C++ code""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [SOURCEFORGE_SOURCE] -sources = ['%(name)s-%(version)s.tar.bz2'] - -dependencies = [ - ('binutils', '2.36.1'), - ('Qt5', '5.14.2'), - ('PCRE', '8.44'), - ('CMake', '3.18.0', '', SYSTEM), -] - -configopts = '-DUSE_Z3:BOOL=OFF' - -sanity_check_paths = { - 'files': ['bin/cppcheck'], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/c/cppcheck/cppcheck-2.2-GCCcore-9.3.0.eb b/Golden_Repo/c/cppcheck/cppcheck-2.2-GCCcore-9.3.0.eb deleted file mode 100644 index a9c8a8dc2a70edc8681c36946c8e69e436baedb4..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cppcheck/cppcheck-2.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'CMakeMake' -name = 'cppcheck' -version = '2.2' - -homepage = 'http://cppcheck.sourceforge.net/' -description = """Cppcheck is a static analysis tool for C/C++ code""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [SOURCEFORGE_SOURCE] -sources = ['%(name)s-%(version)s.tar.bz2'] - -dependencies = [ - ('binutils', '2.34'), - ('Qt5', '5.14.2'), - ('PCRE', '8.44'), - ('CMake', '3.18.0'), -] - -configopts = '-DUSE_Z3:BOOL=OFF' - -sanity_check_paths = { - 'files': ['bin/cppcheck'], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/c/cuDNN/cuDNN-8.0.2.39-CUDA-11.0.eb b/Golden_Repo/c/cuDNN/cuDNN-8.0.2.39-CUDA-11.0.eb deleted file mode 100644 index 87c1a67a8405cccfa5bb0834422fc142f511d77b..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cuDNN/cuDNN-8.0.2.39-CUDA-11.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -## -# Author: Stephane Thiell <sthiell@stanford.edu> -## -easyblock = 'Tarball' - -name = 'cuDNN' - -version = '8.0.2.39' -local_cuda_version = '11.0' - -versionsuffix = '-CUDA-%s' % local_cuda_version - -homepage = 
'https://developer.nvidia.com/cudnn' -description = """The NVIDIA CUDA Deep Neural Network library (cuDNN) is a -GPU-accelerated library of primitives for deep neural networks.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -# Nvidia developer registration required. -# Download link: https://developer.nvidia.com/rdp/cudnn-download -sources = ['%%(namelower)s-%s-linux-x64-v%%(version)s.tgz' % local_cuda_version] -checksums = ['672f46288b8edd98f8d156a4f1ff518201ca6de0cff67915ceaa37f6d6d86345'] - -dependencies = [('CUDA', local_cuda_version)] - -sanity_check_paths = { - 'files': ['include/cudnn.h', 'lib64/libcudnn_static.a'], - 'dirs': ['include', 'lib64'], -} - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'numlib' diff --git a/Golden_Repo/c/cuDNN/cuDNN-8.2.1.32-CUDA-11.3.eb b/Golden_Repo/c/cuDNN/cuDNN-8.2.1.32-CUDA-11.3.eb deleted file mode 100644 index e06059e4373f2102b7dee9d7edf4c367a284fe26..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cuDNN/cuDNN-8.2.1.32-CUDA-11.3.eb +++ /dev/null @@ -1,37 +0,0 @@ -## -# Author: Stephane Thiell <sthiell@stanford.edu> -## -easyblock = 'Tarball' - -name = 'cuDNN' - -version = '8.2.1.32' - -local_cuda_version = '11.3' - -versionsuffix = '-CUDA-%s' % local_cuda_version - -homepage = 'https://developer.nvidia.com/cudnn' -description = """The NVIDIA CUDA Deep Neural Network library (cuDNN) is a -GPU-accelerated library of primitives for deep neural networks.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -# Nvidia developer registration required. -# Download link: https://developer.nvidia.com/rdp/cudnn-download -sources = ['%%(namelower)s-%s-linux-x64-v%%(version)s.tgz' % - local_cuda_version] -dependencies = [('CUDA', local_cuda_version)] - -sanity_check_paths = { - 'files': ['include/cudnn.h', 'lib64/libcudnn_static.a'], - 'dirs': ['include', 'lib64'], -} - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'numlib' diff --git a/Golden_Repo/c/cuTENSOR/cuTENSOR-1.3.1.3-GCCcore-10.3.0.eb b/Golden_Repo/c/cuTENSOR/cuTENSOR-1.3.1.3-GCCcore-10.3.0.eb deleted file mode 100644 index 9a37a4c2560eb9ae4418065ff182100b2df669aa..0000000000000000000000000000000000000000 --- a/Golden_Repo/c/cuTENSOR/cuTENSOR-1.3.1.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -# LooseVersion used to determine major version from cudaver. 
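# Editor's note -- illustration only: the cuTENSOR recipe that follows builds the
# directory part of its download URL from the first three components of the
# version string via LooseVersion. A minimal sketch of what that evaluates to:
from distutils.version import LooseVersion

version = '1.3.1.3'
parts = LooseVersion(version).version[:3]            # -> [1, 3, 1]
print('{0:d}.{1:d}.{2:d}/local_installers/'.format(*parts))
# -> 1.3.1/local_installers/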
-from distutils.version import LooseVersion as local_LooseVersion - -easyblock = 'Tarball' - -name = 'cuTENSOR' -version = '1.3.1.3' - -homepage = 'https://developer.nvidia.com/cutensor' -description = """The cuTENSOR Library is a GPU-accelerated tensor linear algebra library providing tensor contraction, -reduction and elementwise operations.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'https://developer.download.nvidia.com/compute/cutensor/' - '{0:d}.{1:d}.{2:d}/local_installers/'.format( - *local_LooseVersion(version).version[:3]) -] -sources = ['libcutensor-linux-%(arch)s-%(version)s.tar.gz'] -checksums = [ - { - 'libcutensor-linux-x86_64-1.3.1.3.tar.gz': - '98d9559da6c1d880b71e7618d266f4e912ea4330f137d78f195781cb7864042f', - 'libcutensor-linux-ppc64le-1.3.1.3.tar.gz': - '1621f950b91929abf05ab581b16a71285370523540d7be3fdf6fc1259e44f6ac', - 'libcutensor-linux-sbsa-1.3.1.3.tar.gz': - '8a3f8bb65ddcaba80fc23178f79c993162363e001c33c40f4cf1641825d60b10', - } -] - -dependencies = [('CUDA', '11.3', '', SYSTEM)] - -sanity_check_paths = { - 'files': ['include/cutensor.h', 'include/cutensor/types.h', - 'lib/%s/libcutensor.%s' % ('%(cudamajver)s', SHLIB_EXT), - 'lib/%s/libcutensor_static.a' % '%(cudamajver)s'], - 'dirs': [], -} - -modextrapaths = { - 'LD_LIBRARY_PATH': ['lib/%s' % '%(cudamajver)s'], - 'LIBRARY_PATH': ['lib/%s' % '%(cudamajver)s'], -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/d/DBus/DBus-1.12.20-GCCcore-10.3.0.eb b/Golden_Repo/d/DBus/DBus-1.12.20-GCCcore-10.3.0.eb deleted file mode 100644 index b6d4b6596c8ad78c8c5905501d9132977c5ac586..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/DBus/DBus-1.12.20-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'DBus' -version = '1.12.20' - -homepage = 'http://dbus.freedesktop.org/doc/dbus-glib' -description = """D-Bus is a message bus system, a simple way for applications to talk - to one another. In addition to interprocess communication, D-Bus helps - coordinate process lifecycle; it makes it simple and reliable to code - a "single instance" application or daemon, and to launch applications - and daemons on demand when their services are needed. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://dbus.freedesktop.org/releases/dbus'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/dbus-%s' % x for x in ['cleanup-sockets', 'daemon', 'launch', 'monitor', 'run-session', 'send', - 'uuidgen']] + - ['lib/libdbus-1.%s' % x for x in ['a', SHLIB_EXT]], - 'dirs': ['include', 'share'] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/d/DBus/DBus-1.12.20-GCCcore-9.3.0.eb b/Golden_Repo/d/DBus/DBus-1.12.20-GCCcore-9.3.0.eb deleted file mode 100644 index 6ed53869809c0c63ec595d9977e5c48c21b6323e..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/DBus/DBus-1.12.20-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'DBus' -version = '1.12.20' - -homepage = 'http://dbus.freedesktop.org/doc/dbus-glib' -description = """D-Bus is a message bus system, a simple way for applications to talk - to one another. 
In addition to interprocess communication, D-Bus helps - coordinate process lifecycle; it makes it simple and reliable to code - a "single instance" application or daemon, and to launch applications - and daemons on demand when their services are needed. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://dbus.freedesktop.org/releases/dbus'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/dbus-%s' % x for x in ['cleanup-sockets', 'daemon', 'launch', 'monitor', 'run-session', 'send', - 'uuidgen']] + - ['lib/libdbus-1.%s' % x for x in ['a', SHLIB_EXT]], - 'dirs': ['include', 'share'] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/d/DWave/DWave-3.2.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/d/DWave/DWave-3.2.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index 5187bff64457b3af73496b4edf57d0e2eb0d8a87..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/DWave/DWave-3.2.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,139 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'DWave' -version = '3.2.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://docs.ocean.dwavesys.com' -description = """Ocean software is a suite of tools D-Wave Systems for solving hard problems with quantum computers.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), - ('Boost.Python', '1.74.0', '-nompi'), - ('protobuf', '3.13.0'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': True, - 'download_dep_fail': True, - 'use_pip_for_deps': False, -} - -exts_list = [ - ('plucky', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5bc75d43ae6b40f1b7ba42000b37e4934fa6bd2d6a6cd4e47461f803a404c194')]), - ])), - ('homebase', '1.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9ee008df4298b420852d815e6df488822229c4bd8d571bcd0a454e04232c635e')]), - ])), - ('dwave-cloud-client', '0.8.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5472aa523311380bdb5b4e659d127d4fc7acdef73e03234df23890972be2fca3')]), - ('modulename', 'dwave.cloud'), - ])), - ('dwave_networkx', '0.8.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9d6bb1f93d89511aeaa191319da9970e48134a4dccecff59c972e8f1f3107387')]), - ('modulename', 'dwave_networkx'), - ])), - ('dwave-system', '1.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ec1283592211016b30587c67837b23898a74b809de6d715447ff2822798b26f1')]), - ('modulename', 'dwave.system'), - ])), - ('dwave-qbsolv', '0.3.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'aca74f909748cd02d3e59647ae6c81585dd75afcf53a19fa116580c7c7873782')]), - ('modulename', 'dwave_qbsolv'), - ])), - ('dwave-hybrid', '0.6.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd8b195dabe9e630c31bb9e3362d1cb6d3fab933b94d175719cd3771e346d5934')]), - ('modulename', 'hybrid'), - ])), - ('dwave-neal', '0.5.7', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'86da2141f81ade1e69d595a9840222a45c47b19577c037ef3d4988b5463c26f8')]), - ('modulename', 'neal'), - ])), - ('dimod', '0.9.13', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a9ab210a2737199b824191089e71fa669b2760b168d0f7ad5aaa7fddcada933f')]), - ])), - ('dwavebinarycsp', '0.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bf2000c2738df2b3924470f080e73c42e7246b5137fdedc7a2627d5e08479bdf')]), - ])), - ('fasteners', '0.16', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c995d8c26b017c5d6a6de9ad29a0f9cdd57de61ae1113d28fac26622b06a0933')]), - ])), - ('minorminer', '0.2.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3a521099ec64c99295ae8205b08da29c06aad81d3be74fb27a58d22e220a2a33')]), - ])), - ('penaltymodel', '0.16.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e2d9e94154a675b33db59dadb9856ee4c8bc1aee1647c664df1b17bc53b04f2a')]), - ])), - ('penaltymodel-cache', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '93b76ba83e9b39bca705c341b5e925f4ff5841c20f3e5fac962304656f1ec66e')]), - ('modulename', 'penaltymodel.cache') - ])), - ('penaltymodel-lp', '0.1.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '044ec3a12f78003b044c05b66e9597131bcbb47d775db03dba2d1dc45d2f0efb')]), - ('modulename', 'penaltymodel.lp') - ])), - ('protobuf', '3.13.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6a82e0c8bb2bf58f606040cc5814e07715b2094caeba281e2e7d0b0e2e397db5')]), - ('modulename', 'google.protobuf') - ])), - ('ortools', '8.0.8283', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'ortools-8.0.8283-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '31020d0b46c8e4ff7d920803c3bb5cbfc5630d319b9b46f70de8d18f9456e9c9')]), - ('unpack_sources', False), - ])), - ('penaltymodel-mip', '0.2.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c3471a8f10107b163ab0035125fe861a3c55808e7656db9ed524451667ff1e38')]), - ('modulename', 'penaltymodel.mip') - ])), - ('wrapt', '1.12.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7')]), - ])), - ('Deprecated', '1.2.10', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74')]), - ])), - ('pyqubo', '1.0.7', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'pyqubo-1.0.7-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '79dfd9a7f2f75a216838e357959b321d55ced7cdf4559037a4704d2f2927f6ba')]), - ('unpack_sources', False), - ])), - ('importlib_resources', '1.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd3279fd0f6f847cced9f7acc19bd3e5df54d34f93a2e7bb5f238f81545787078')]), - ])), - ('dwave-inspector', '0.2.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b596e3cc0055e373e0955b402e1ee43998e0ceb2968e09eeb8a194c67b080e38')]), - ('modulename', 'dwave.inspector'), - ])), - ('dwave-tabu', '0.3.1', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'dwave_tabu-0.3.1-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '6a57c6e0c6d6dce912d36c2c183a2a841c1f2830fab7434ddb912e5200d7dc2f')]), - ('unpack_sources', False), - ('modulename', 'tabu'), - ])), - ('dwave-greedy', '0.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'dwave_greedy-0.1.2-cp38-cp38-manylinux1_x86_64.whl'), - 
('checksums', [('sha256', '88cdb3897159880d02b6e5880285b2b5a5295c3a725ce7c990ce4e72c21724ac')]), - ('unpack_sources', False), - ('modulename', 'greedy'), - ])), - ('dwave-ocean-sdk', version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a8a5e0acbd7483f62f65beb6f2fdb397d5370e20df949434521cae0635d954e9')]), - ('skipsteps', ['sanitycheck']), - ('modulename', 'dwave.system'), # just a fake for sanity-check - ])), -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/d/DWave/DWave-3.2.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/d/DWave/DWave-3.2.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 6fb629051b036b532cd2a692ea05a1f66b20e56d..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/DWave/DWave-3.2.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,139 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'DWave' -version = '3.2.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://docs.ocean.dwavesys.com' -description = """Ocean software is a suite of tools D-Wave Systems for solving hard problems with quantum computers.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), - ('Boost.Python', '1.74.0', '-nompi'), - ('protobuf', '3.13.0'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': True, - 'download_dep_fail': True, - 'use_pip_for_deps': False, -} - -exts_list = [ - ('plucky', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5bc75d43ae6b40f1b7ba42000b37e4934fa6bd2d6a6cd4e47461f803a404c194')]), - ])), - ('homebase', '1.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9ee008df4298b420852d815e6df488822229c4bd8d571bcd0a454e04232c635e')]), - ])), - ('dwave-cloud-client', '0.8.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5472aa523311380bdb5b4e659d127d4fc7acdef73e03234df23890972be2fca3')]), - ('modulename', 'dwave.cloud'), - ])), - ('dwave_networkx', '0.8.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9d6bb1f93d89511aeaa191319da9970e48134a4dccecff59c972e8f1f3107387')]), - ('modulename', 'dwave_networkx'), - ])), - ('dwave-system', '1.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ec1283592211016b30587c67837b23898a74b809de6d715447ff2822798b26f1')]), - ('modulename', 'dwave.system'), - ])), - ('dwave-qbsolv', '0.3.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'aca74f909748cd02d3e59647ae6c81585dd75afcf53a19fa116580c7c7873782')]), - ('modulename', 'dwave_qbsolv'), - ])), - ('dwave-hybrid', '0.6.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd8b195dabe9e630c31bb9e3362d1cb6d3fab933b94d175719cd3771e346d5934')]), - ('modulename', 'hybrid'), - ])), - ('dwave-neal', '0.5.7', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '86da2141f81ade1e69d595a9840222a45c47b19577c037ef3d4988b5463c26f8')]), - ('modulename', 'neal'), - ])), - ('dimod', '0.9.13', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a9ab210a2737199b824191089e71fa669b2760b168d0f7ad5aaa7fddcada933f')]), - ])), - ('dwavebinarycsp', '0.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'bf2000c2738df2b3924470f080e73c42e7246b5137fdedc7a2627d5e08479bdf')]), - ])), - ('fasteners', '0.16', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c995d8c26b017c5d6a6de9ad29a0f9cdd57de61ae1113d28fac26622b06a0933')]), - ])), - ('minorminer', '0.2.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3a521099ec64c99295ae8205b08da29c06aad81d3be74fb27a58d22e220a2a33')]), - ])), - ('penaltymodel', '0.16.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e2d9e94154a675b33db59dadb9856ee4c8bc1aee1647c664df1b17bc53b04f2a')]), - ])), - ('penaltymodel-cache', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '93b76ba83e9b39bca705c341b5e925f4ff5841c20f3e5fac962304656f1ec66e')]), - ('modulename', 'penaltymodel.cache') - ])), - ('penaltymodel-lp', '0.1.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '044ec3a12f78003b044c05b66e9597131bcbb47d775db03dba2d1dc45d2f0efb')]), - ('modulename', 'penaltymodel.lp') - ])), - ('protobuf', '3.13.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6a82e0c8bb2bf58f606040cc5814e07715b2094caeba281e2e7d0b0e2e397db5')]), - ('modulename', 'google.protobuf') - ])), - ('ortools', '8.0.8283', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'ortools-8.0.8283-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '31020d0b46c8e4ff7d920803c3bb5cbfc5630d319b9b46f70de8d18f9456e9c9')]), - ('unpack_sources', False), - ])), - ('penaltymodel-mip', '0.2.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c3471a8f10107b163ab0035125fe861a3c55808e7656db9ed524451667ff1e38')]), - ('modulename', 'penaltymodel.mip') - ])), - ('wrapt', '1.12.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7')]), - ])), - ('Deprecated', '1.2.10', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74')]), - ])), - ('pyqubo', '1.0.7', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'pyqubo-1.0.7-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '79dfd9a7f2f75a216838e357959b321d55ced7cdf4559037a4704d2f2927f6ba')]), - ('unpack_sources', False), - ])), - ('importlib_resources', '1.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd3279fd0f6f847cced9f7acc19bd3e5df54d34f93a2e7bb5f238f81545787078')]), - ])), - ('dwave-inspector', '0.2.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b596e3cc0055e373e0955b402e1ee43998e0ceb2968e09eeb8a194c67b080e38')]), - ('modulename', 'dwave.inspector'), - ])), - ('dwave-tabu', '0.3.1', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'dwave_tabu-0.3.1-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '6a57c6e0c6d6dce912d36c2c183a2a841c1f2830fab7434ddb912e5200d7dc2f')]), - ('unpack_sources', False), - ('modulename', 'tabu'), - ])), - ('dwave-greedy', '0.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'dwave_greedy-0.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '88cdb3897159880d02b6e5880285b2b5a5295c3a725ce7c990ce4e72c21724ac')]), - ('unpack_sources', False), - ('modulename', 'greedy'), - ])), - ('dwave-ocean-sdk', version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a8a5e0acbd7483f62f65beb6f2fdb397d5370e20df949434521cae0635d954e9')]), - ('skipsteps', ['sanitycheck']), - 
('modulename', 'dwave.system'), # just a fake for sanity-check - ])), -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb b/Golden_Repo/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb deleted file mode 100644 index bf660969370912f41428aaf2a52eb6b938dc8a3c..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'Doxygen' -version = '1.8.18' - -homepage = 'http://www.doxygen.org' -description = """Doxygen is a documentation system for C++, C, Java, Objective-C, Python, - IDL (Corba and Microsoft flavors), Fortran, VHDL, PHP, C#, and to some extent D. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = ['%(namelower)s-%(version)s.src.tar.gz'] -source_urls = ['http://doxygen.nl/files/'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('flex', '2.6.4'), - ('Bison', '3.7.6'), -] - -moduleclass = 'devel' diff --git a/Golden_Repo/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb b/Golden_Repo/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb deleted file mode 100644 index e85e36f93f1b1d6c20b4310264ce0d9536527c73..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'Doxygen' -version = '1.8.18' - -homepage = 'http://www.doxygen.org' -description = """Doxygen is a documentation system for C++, C, Java, Objective-C, Python, - IDL (Corba and Microsoft flavors), Fortran, VHDL, PHP, C#, and to some extent D. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = ['%(namelower)s-%(version)s.src.tar.gz'] -source_urls = ['http://doxygen.nl/files/'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('flex', '2.6.4'), - ('Bison', '3.6.4'), -] - -moduleclass = 'devel' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.2.1-gpsmpi-2020.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.2.1-gpsmpi-2020.eb deleted file mode 100644 index e691c5c2f9da15b733a0722effdb5c67b0f80305..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.2.1-gpsmpi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.2.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. 
- -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-gompi-2020.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-gompi-2020.eb deleted file mode 100644 index 5145ef96f3462d46cbd75d79bb01b5f52d62489d..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-gompi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.3.0" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. - -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
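# Editor's note -- illustration only: the darshan release tarball ships both
# darshan-runtime and darshan-util in one source tree, so these recipes prepend a
# "cd" to the configure/build/install commands to build a single subdirectory.
# The pre*opts values above are plain Python %-formatting:
local_subpath = 'darshan-runtime'
preconfigopts = 'cd %s;' % local_subpath
print(preconfigopts)   # -> cd darshan-runtime;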
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-gpsmpi-2020.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-gpsmpi-2020.eb deleted file mode 100644 index 994d32d47755586e7119a087df0f4dea977544ef..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-gpsmpi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.3.0" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. - -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-ipsmpi-2020.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-ipsmpi-2020.eb deleted file mode 100644 index b9ce742c700f7160e093f245a142ab16d3000d3a..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.0-ipsmpi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.3.0" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. - -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-gompi-2021.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-gompi-2021.eb deleted file mode 100644 index 97d16b5b50a347d123634b92f21bf0fd59aa020f..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-gompi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. - -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-gpsmpi-2021.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-gpsmpi-2021.eb deleted file mode 100644 index 36bba411a2d72a528c4d52fcda3ec5d71648c5bf..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-gpsmpi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. - -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-iimpi-2021.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-iimpi-2021.eb deleted file mode 100644 index 79f1ebf37e5ace02d2ad61fe738d29cde9539f43..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-iimpi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. - -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-ipsmpi-2021.eb b/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-ipsmpi-2021.eb deleted file mode 100644 index 925e93154d0d475520cbb0842bdb4f45a00b8f71..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-runtime/darshan-runtime-3.3.1-ipsmpi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-runtime" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -Export the environment variable DARSHAN_LOG_PATH to where the logging -data should be written, e.g. - -LD_PRELOAD=$EBROOTDARSHANMINRUNTIME/lib/libdarshan.so \ -DARSHAN_LOG_PATH=/path/to/your/logdir \ -srun -n 32 ./executable - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-runtime' -preconfigopts = 'cd %s;' % local_subpath -configopts = '--with-mem-align=8 --with-log-path-by-env=DARSHAN_LOG_PATH ' -configopts += ' --with-jobid-env=SLURM_JOBID CC=mpicc --enable-hdf5-mod=$EBROOTHDF5' - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -sanity_check_paths = { - 'files': ["lib/libdarshan.so"], - 'dirs': [] -} - -dependencies = [ - ("HDF5", "1.10.6"), -] - - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.2.1-gpsmpi-2020.eb b/Golden_Repo/d/darshan-util/darshan-util-3.2.1-gpsmpi-2020.eb deleted file mode 100644 index 632405aa38966531ce2d859f66b52b01aa23e5fe..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.2.1-gpsmpi-2020.eb +++ /dev/null @@ -1,63 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.2.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.3.0-gompi-2020.eb b/Golden_Repo/d/darshan-util/darshan-util-3.3.0-gompi-2020.eb deleted file mode 100644 index d0fa9e532feea28130ec9732ba0572e339908bef..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.3.0-gompi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.3.0" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), - ('texlive', '20200406'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.3.0-gpsmpi-2020.eb b/Golden_Repo/d/darshan-util/darshan-util-3.3.0-gpsmpi-2020.eb deleted file mode 100644 index c86fdbe5dbe38db1279e53fa9c8bea648643405a..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.3.0-gpsmpi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.3.0" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), - ('texlive', '20200406'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.3.0-ipsmpi-2020.eb b/Golden_Repo/d/darshan-util/darshan-util-3.3.0-ipsmpi-2020.eb deleted file mode 100644 index 41047ee84ec315fdeb6d268c399dc2c5f924bc77..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.3.0-ipsmpi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.3.0" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), - ('texlive', '20200406'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-gompi-2021.eb b/Golden_Repo/d/darshan-util/darshan-util-3.3.1-gompi-2021.eb deleted file mode 100644 index 492e0190ed5bdadfcbdd94915bca52340ce1810b..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-gompi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), - ('texlive', '20200406'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-gpsmpi-2021.eb b/Golden_Repo/d/darshan-util/darshan-util-3.3.1-gpsmpi-2021.eb deleted file mode 100644 index e624dc5fc5f10828bb741ebffab09acc6ecca00e..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-gpsmpi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), - ('texlive', '20200406'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-iimpi-2021.eb b/Golden_Repo/d/darshan-util/darshan-util-3.3.1-iimpi-2021.eb deleted file mode 100644 index cd8bdf4a6b305bde0eecf5fe3c15552920f05071..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-iimpi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), - ('texlive', '20200406'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-ipsmpi-2021.eb b/Golden_Repo/d/darshan-util/darshan-util-3.3.1-ipsmpi-2021.eb deleted file mode 100644 index e626c0e8801cfddec39cf897d9be15efe38c2b96..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/darshan-util/darshan-util-3.3.1-ipsmpi-2021.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = "ConfigureMake" -name = "darshan-util" -version = "3.3.1" - -homepage = 'http://www.mcs.anl.gov/research/projects/darshan/' - -description = """Darshan is designed to capture an accurate picture of -application I/O behavior, including properties such as patterns of -access within files, with minimum overhead. The name is taken from a -Sanskrit word for “sight” or “vision”. - -Darshan can be used to investigate and tune the I/O behavior of -complex HPC applications. In addition, Darshan’s lightweight design -makes it suitable for full time deployment for workload -characterization of large systems. We hope that such studies will -help the storage research community to better serve the needs of -scientific computing. - -Darshan was originally developed on the IBM Blue Gene series of -computers deployed at the Argonne Leadership Computing Facility, but -it is portable across a wide variety of platforms include the Cray -XE6, Cray XC30, and Linux clusters. Darshan routinely instruments -jobs using up to 786,432 compute cores on the Mira system at ALCF. -""" - -usage = """ -The result is a darshan log file which can be converted using - -darshan-job-summary.pl /path/to/your/logdir/mylog.darshan.gz - -Note: - -Darshan currently only works with C or C++ codes, not with Fortran. 
-""" - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} - -sources = ['ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-%s.tar.gz' % version] - -local_subpath = 'darshan-util' - -preconfigopts = 'cd %s;' % local_subpath - -prebuildopts = 'cd %s;' % local_subpath - -preinstallopts = 'cd %s;' % local_subpath - -configopts = '--enable-hdf5-mod=$EBROOTHDF5' - -sanity_check_paths = { - 'files': ["bin/darshan-job-summary.pl"], - 'dirs': [] -} - -dependencies = [ - ("gnuplot", "5.2.8"), - ("Perl", "5.32.0"), - ("HDF5", "1.10.6"), - ('texlive', '20200406'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/d/dask/dask-2.22.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/d/dask/dask-2.22.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index aa742baee982735bb87cae389f3d57b264768dbb..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/dask/dask-2.22.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'dask' -version = '2.22.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://github.com/dask/dask/' -description = """Dask provides multi-core execution on larger-than-memory datasets using blocked algorithms - and task scheduling.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} - -source_urls = ['https://github.com/dask/dask/archive/'] -sources = ['%(version)s.tar.gz'] - -download_dep_fail = True - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/d/dask/dask-2.22.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/d/dask/dask-2.22.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 7d1bb5834f2bca589660c6445d7a38602f0a3255..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/dask/dask-2.22.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'dask' -version = '2.22.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://github.com/dask/dask/' -description = """Dask provides multi-core execution on larger-than-memory datasets using blocked algorithms - and task scheduling.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -source_urls = ['https://github.com/dask/dask/archive/'] -sources = ['%(version)s.tar.gz'] - -download_dep_fail = True - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/d/distributed/distributed-2.30.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/d/distributed/distributed-2.30.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index db2ce11f77bc724288b5e9023aae9338264ec296..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/distributed/distributed-2.30.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = 'Bundle' - -name = 'distributed' -version = '2.30.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://distributed.readthedocs.io/' -description = """Dask.distributed is a lightweight library for distributed computing in Python. 
- It extends both the concurrent.futures and dask APIs to moderate sized clusters.""" - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -site_contacts = 'a.strube@fz-juelich.de' - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' - -dependencies = [ - ('Python', '3.8.5'), - ('dask', '2.22.0', versionsuffix), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'sanity_pip_check': True, -} - -exts_list = [ - ('zict', '2.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/z/zict'], - 'source_tmpl': 'zict-%(version)s.tar.gz', - }), - ('HeapDict', '1.0.1', { - 'modulename': 'heapdict', - 'source_urls': ['https://pypi.python.org/packages/source/H/HeapDict'], - }), - ('tornado', '6.0.3', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tornado'], - }), - ('tblib', '1.7.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tblib'], - }), - ('msgpack', '1.0.0', { - 'modulename': 'msgpack', - 'source_urls': ['https://pypi.python.org/packages/source/m/msgpack'], - }), - ('sortedcontainers', '2.3.0', { - 'source_urls': ['https://pypi.python.org/packages/source/s/sortedcontainers'], - }), - (name, version, { - 'source_urls': ['https://pypi.python.org/packages/source/d/distributed'], - }), -] - -sanity_check_paths = { - 'files': ['bin/dask-scheduler', 'bin/dask-ssh', 'bin/dask-worker'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'tools' diff --git a/Golden_Repo/d/double-conversion/double-conversion-3.1.5-GCCcore-10.3.0.eb b/Golden_Repo/d/double-conversion/double-conversion-3.1.5-GCCcore-10.3.0.eb deleted file mode 100644 index 9b17d94f1d7669ec0e1d19f04bed8ff7cfca5a26..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/double-conversion/double-conversion-3.1.5-GCCcore-10.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'double-conversion' -version = '3.1.5' - -homepage = 'https://github.com/google/double-conversion' -description = "Efficient binary-decimal and decimal-binary conversion routines for IEEE doubles." 
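The distributed easyconfig above builds its extensions as PythonPackage extensions and extends PYTHONPATH through modextrapaths, so the packages listed in exts_list should be importable once the module is loaded. A minimal, hedged check (assumes the distributed module and its Python dependency are loaded; heapdict and msgpack use the lowercase modulenames given above):

# Sketch: import check for the extensions bundled in the distributed easyconfig.
import importlib

for name in ('zict', 'heapdict', 'tornado', 'tblib',
             'msgpack', 'sortedcontainers', 'distributed'):
    importlib.import_module(name)
    print('ok:', name)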
- -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/google/%(name)s/archive'] -sources = ['v%(version)s.tar.gz'] -checksums = ['a63ecb93182134ba4293fd5f22d6e08ca417caafa244afaa751cbfddf6415b13'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -separate_build_dir = True - -build_type = 'Release' - -# Build static lib, static lib with -fPIC and shared lib -configopts = [ - '', - '-DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_STATIC_LIBRARY_SUFFIX_CXX=_pic.a', - '-DBUILD_SHARED_LIBS=ON' -] - -sanity_check_paths = { - 'files': ['include/double-conversion/%s.h' % h for h in ['bignum', 'cached-powers', 'diy-fp', 'double-conversion', - 'fast-dtoa', 'fixed-dtoa', 'ieee', 'strtod', 'utils']] + - ['lib/libdouble-conversion.%s' % e for e in ['a', SHLIB_EXT]] + ['lib/libdouble-conversion_pic.a'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/d/double-conversion/double-conversion-3.1.5-GCCcore-9.3.0.eb b/Golden_Repo/d/double-conversion/double-conversion-3.1.5-GCCcore-9.3.0.eb deleted file mode 100644 index a86895e45e0710deb161269295967322b34e04a6..0000000000000000000000000000000000000000 --- a/Golden_Repo/d/double-conversion/double-conversion-3.1.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'double-conversion' -version = '3.1.5' - -homepage = 'https://github.com/google/double-conversion' -description = "Efficient binary-decimal and decimal-binary conversion routines for IEEE doubles." - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/google/%(name)s/archive'] -sources = ['v%(version)s.tar.gz'] -checksums = ['a63ecb93182134ba4293fd5f22d6e08ca417caafa244afaa751cbfddf6415b13'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -separate_build_dir = True - -build_type = 'Release' - -# Build static lib, static lib with -fPIC and shared lib -configopts = [ - '', - '-DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_STATIC_LIBRARY_SUFFIX_CXX=_pic.a', - '-DBUILD_SHARED_LIBS=ON' -] - -sanity_check_paths = { - 'files': ['include/double-conversion/%s.h' % h for h in ['bignum', 'cached-powers', 'diy-fp', 'double-conversion', - 'fast-dtoa', 'fixed-dtoa', 'ieee', 'strtod', 'utils']] + - ['lib/libdouble-conversion.%s' % e for e in ['a', SHLIB_EXT]] + ['lib/libdouble-conversion_pic.a'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb b/Golden_Repo/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb deleted file mode 100644 index 1d278317993d8cda438640c0f9d59c1330fbf20a..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb +++ /dev/null @@ -1,97 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' -versionsuffix = '-gpu' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. 
They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_gf_lp64 --lmkl_sequential -lmkl_core -lmkl_blacs_intelmpi_lp64 --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_70" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.05.001-gpsmkl-2020.eb b/Golden_Repo/e/ELPA/ELPA-2020.05.001-gpsmkl-2020.eb deleted file mode 100644 index 8d2768cc5fc39cd431f57a2371d7ffa9a8fb95ce..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.05.001-gpsmkl-2020.eb +++ /dev/null @@ -1,92 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. 
They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_gf_lp64 --lmkl_sequential[-lmkl_gnu_thread] --lmkl_core -lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-2020.eb b/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-2020.eb deleted file mode 100644 index aad7788c729d4d28e69cd46831cfd00c540d7a27..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-2020.eb +++ /dev/null @@ -1,91 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). 
This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_intel_lp64 --lmkl_sequential[-lmkl_intel_thread] --lmkl_core -liomp5 -lpthread -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb b/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb deleted file mode 100644 index bba062544d1a9d5efc83de2b06b3f8cf6357077a..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb +++ /dev/null @@ -1,96 +0,0 @@ -name = 'ELPA' 
-version = '2020.05.001' -versionsuffix = '-gpu' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_intel_lp64 --lmkl_sequential -lmkl_core -lmkl_blacs_intelmpi_lp64 --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_70" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-para-2020.eb b/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-para-2020.eb deleted file mode 100644 index c7faa902655e88c54c955e9af8fb96a482ba1c53..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.05.001-intel-para-2020.eb +++ 
/dev/null @@ -1,91 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_intel_lp64 --lmkl_sequential[-lmkl_intel_thread] --lmkl_core -liomp5 -lpthread -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.05.001_install-libelpatest.patch 
b/Golden_Repo/e/ELPA/ELPA-2020.05.001_install-libelpatest.patch deleted file mode 100644 index e2313538704ffb43346676b235ecc0b8474ea8e7..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.05.001_install-libelpatest.patch +++ /dev/null @@ -1,5871 +0,0 @@ ---- elpa-2020.05.001/Makefile.am 2020-06-15 14:39:19.000000000 +0200 -+++ elpa-2020.05.001_ok/Makefile.am 2020-06-25 09:15:40.115746908 +0200 -@@ -498,7 +498,7 @@ - test_program_fcflags = $(AM_FCFLAGS) $(FC_MODOUT)test_modules $(FC_MODINC)test_modules $(FC_MODINC)modules $(FC_MODINC)private_modules - - # library with shared sources for the test files --noinst_LTLIBRARIES += libelpatest@SUFFIX@.la -+lib_LTLIBRARIES += libelpatest@SUFFIX@.la - libelpatest@SUFFIX@_la_FCFLAGS = $(test_program_fcflags) - libelpatest@SUFFIX@_la_SOURCES = \ - test/shared/tests_variable_definitions.F90 \ -diff -ruN elpa-2020.05.001/examples/C/Makefile_examples_hybrid elpa-2020.05.001_ok/examples/C/Makefile_examples_hybrid ---- elpa-2020.05.001/examples/C/Makefile_examples_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/C/Makefile_examples_hybrid 2020-06-25 09:35:05.174752000 +0200 -@@ -0,0 +1,21 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB_OPENMP) -lelpa_openmp -lelpatest_openmp $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_1stage_hybrid test_real_2stage_all_kernels_hybrid test_autotune_hybrid test_multiple_objs_hybrid -+ -+test_real_1stage_hybrid: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DWITH_OPENMP -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE) -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels_hybrid: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DWITH_OPENMP -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE) -o $@ test.c $(LIBS) -+ -+test_autotune_hybrid: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE) -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs_hybrid: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE) -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.05.001/examples/C/Makefile_examples_pure elpa-2020.05.001_ok/examples/C/Makefile_examples_pure ---- elpa-2020.05.001/examples/C/Makefile_examples_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/C/Makefile_examples_pure 2020-06-25 09:31:37.807696000 +0200 -@@ -0,0 +1,21 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs -+ -+test_real_1stage: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE 
-DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test.c $(LIBS) -+ -+test_autotune: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.05.001/examples/C/Makefile_examples_pure_cuda elpa-2020.05.001_ok/examples/C/Makefile_examples_pure_cuda ---- elpa-2020.05.001/examples/C/Makefile_examples_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/C/Makefile_examples_pure_cuda 2020-06-25 09:33:39.050944000 +0200 -@@ -0,0 +1,21 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs -+ -+test_real_1stage: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test.c $(LIBS) -+ -+test_autotune: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.05.001/examples/C/test_autotune.c elpa-2020.05.001_ok/examples/C/test_autotune.c ---- elpa-2020.05.001/examples/C/test_autotune.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/C/test_autotune.c 2020-06-25 09:21:55.801360000 +0200 -@@ -0,0 +1,335 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. 
-+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. -+*/ -+ -+#include "config.h" -+ -+#include <string.h> -+#include <stdio.h> -+#include <stdlib.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+//#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+//#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+//#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# else -+# define MATRIX_TYPE complex float -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# else -+# define MATRIX_TYPE complex double -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+ -+#ifdef HAVE_64BIT_INTEGER_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#include "test/shared/generated.h" -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE status; -+ int error_elpa; -+ elpa_t handle; -+ -+ elpa_autotune_t autotune_handle; -+ C_INT_TYPE i, unfinished; -+ -+ C_INT_TYPE value; -+#ifdef WITH_MPI -+ MPI_Init(&argc, &argv); -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocs); -+ MPI_Comm_rank(MPI_COMM_WORLD, &myid); -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* 
allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_real_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_real_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_complex_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_complex_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ handle = elpa_allocate(); -+#else -+ handle = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ /* Set parameters */ -+ elpa_set(handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (myid == 0) { -+ printf("Setting the matrix parameters na=%d, nev=%d \n",na,nev); -+ } -+ elpa_set(handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(handle)); -+ -+ elpa_set(handle, "gpu", 0, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ autotune_handle = elpa_autotune_setup(handle, ELPA_AUTOTUNE_FAST, ELPA_AUTOTUNE_DOMAIN_REAL, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ /* mimic 20 scf steps */ -+ -+ for (i=0; i < 20; i++) { -+ -+ unfinished = elpa_autotune_step(handle, autotune_handle, &error_elpa); -+ -+ if (unfinished == 0) { -+ if (myid == 0) { -+ printf("ELPA autotuning finished in the %d th scf step \n",i); -+ } -+ break; -+ } -+ if (myid == 0) { -+ printf("The current setting of the ELPA object: \n"); -+ elpa_print_settings(handle, &error_elpa); -+ -+ printf("The state of the autotuning: \n"); -+ elpa_autotune_print_state(handle, autotune_handle, &error_elpa); -+ } -+ -+ -+ /* Solve EV problem */ -+ elpa_eigenvectors(handle, a, ev, z, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* check the results */ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_real_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(double)); -+ -+#else -+ status = check_correctness_evp_numeric_residuals_real_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(float)); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_complex_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, 
my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex double)); -+#else -+ status = check_correctness_evp_numeric_residuals_complex_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex float)); -+#endif -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ break; -+ } -+ printf("hier %d \n",myid); -+ } -+ -+ if (unfinished == 1) { -+ if (myid == 0) { -+ printf("ELPA autotuning did not finished during %d scf cycles\n",i); -+ -+ } -+ -+ } -+ elpa_autotune_set_best(handle, autotune_handle, &error_elpa); -+ -+ if (myid == 0) { -+ printf("The best combination found by the autotuning:\n"); -+ elpa_autotune_print_best(handle, autotune_handle, &error_elpa); -+ } -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_autotune_deallocate(autotune_handle); -+ elpa_deallocate(handle); -+#else -+ elpa_autotune_deallocate(autotune_handle, &error_elpa); -+ elpa_deallocate(handle, &error_elpa); -+#endif -+ elpa_uninit(&error_elpa); -+ -+ if (myid == 0) { -+ printf("\n"); -+ printf("2stage ELPA real solver complete\n"); -+ printf("\n"); -+ } -+ -+ if (status ==0){ -+ if (myid ==0) { -+ printf("All ok!\n"); -+ } -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} -diff -ruN elpa-2020.05.001/examples/C/test.c elpa-2020.05.001_ok/examples/C/test.c ---- elpa-2020.05.001/examples/C/test.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/C/test.c 2020-06-25 09:21:55.793130000 +0200 -@@ -0,0 +1,339 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. 
-+*/ -+ -+#include "config.h" -+ -+#include <stdio.h> -+#include <stdlib.h> -+#include <string.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_GENERALIZED_DECOMP_EIGENPROBLEM -+#define TEST_GENERALIZED_EIGENPROBLEM -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_real_single_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_real_single_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_real_single_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_real_single_f -+# else -+# define MATRIX_TYPE complex float -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_complex_single_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_complex_single_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_complex_single_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_complex_single_f -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_real_double_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_real_double_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_real_double_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_real_double_f -+# else -+# define MATRIX_TYPE complex double -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_complex_double_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_complex_double_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_complex_double_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_complex_double_f -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#define C_INT_MPI_TYPE long int -+#else -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#define C_INT_MPI_TYPE int -+#endif -+#include "test/shared/generated.h" -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_MPI_TYPE myidMPI, nprocsMPI; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE 
mpi_comm; -+ C_INT_MPI_TYPE provided_mpi_thread_level; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z, *b, *bs; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE error, status; -+ int error_elpa; -+ -+ elpa_t handle; -+ -+ int value; -+#ifdef WITH_MPI -+#ifndef WITH_OPENMP -+ MPI_Init(&argc, &argv); -+#else -+ MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided_mpi_thread_level); -+ -+ if (provided_mpi_thread_level != MPI_THREAD_MULTIPLE) { -+ fprintf(stderr, "MPI ERROR: MPI_THREAD_MULTIPLE is not provided on this system\n"); -+ MPI_Finalize(); -+ exit(77); -+ } -+#endif -+ -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocsMPI); -+ nprocs = (C_INT_TYPE) nprocsMPI; -+ MPI_Comm_rank(MPI_COMM_WORLD, &myidMPI); -+ myid = (C_INT_TYPE) myidMPI; -+ -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+ PREPARE_MATRIX_RANDOM(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ b = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ bs = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ PREPARE_MATRIX_RANDOM_SPD(na, myid, na_rows, na_cols, sc_desc, b, z, bs, nblk, np_rows, np_cols, my_prow, my_pcol); -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+ handle = elpa_allocate(&error_elpa); -+ //assert_elpa_ok(error_elpa); -+ -+ /* Set parameters */ -+ elpa_set(handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (myid == 0) { -+ printf("Setting the matrix parameters na=%d, nev=%d \n",na,nev); -+ } -+ elpa_set(handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ elpa_set(handle, "blacs_context", (int) my_blacs_ctxt, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(handle)); -+ -+ /* Set tunables */ -+#ifdef TEST_SOLVER_1STAGE -+ 
elpa_set(handle, "solver", ELPA_SOLVER_1STAGE, &error_elpa); -+#else -+ elpa_set(handle, "solver", ELPA_SOLVER_2STAGE, &error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "gpu", TEST_GPU, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#if defined(TEST_SOLVE_2STAGE) && defined(TEST_KERNEL) -+# ifdef TEST_COMPLEX -+ elpa_set(handle, "complex_kernel", TEST_KERNEL, &error_elpa); -+# else -+ elpa_set(handle, "real_kernel", TEST_KERNEL, &error_elpa); -+# endif -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ elpa_get(handle, "solver", &value, &error_elpa); -+ if (myid == 0) { -+ printf("Solver is set to %d \n", value); -+ } -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ elpa_generalized_eigenvectors(handle, a, b, ev, z, 0, &error_elpa); -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ //a = as, so that the problem can be solved again -+ memcpy(a, as, na_rows * na_cols * sizeof(MATRIX_TYPE)); -+ elpa_generalized_eigenvectors(handle, a, b, ev, z, 1, &error_elpa); -+#endif -+#else -+ /* Solve EV problem */ -+ elpa_eigenvectors(handle, a, ev, z, &error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ elpa_deallocate(handle, &error_elpa); -+ elpa_uninit(&error_elpa); -+ -+ /* check the results */ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ status = CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs); -+#else -+ status = CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ } -+ if (status ==0){ -+ printf("All ok!\n"); -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ free(b); -+ free(bs); -+#endif -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} -diff -ruN elpa-2020.05.001/examples/C/test_multiple_objs.c elpa-2020.05.001_ok/examples/C/test_multiple_objs.c ---- elpa-2020.05.001/examples/C/test_multiple_objs.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/C/test_multiple_objs.c 2020-06-25 09:21:55.797454000 +0200 -@@ -0,0 +1,387 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. 
-+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. -+*/ -+ -+#include "config.h" -+ -+#include <string.h> -+#include <stdio.h> -+#include <stdlib.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+//#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+//#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+//#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# else -+# define MATRIX_TYPE complex float -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# else -+# define MATRIX_TYPE complex double -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+#ifdef HAVE_64BIT_INTEGER_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#include "test/shared/generated.h" -+void set_basic_parameters(elpa_t *handle, C_INT_TYPE na, C_INT_TYPE nev, C_INT_TYPE na_rows, C_INT_TYPE na_cols, C_INT_TYPE nblk, C_INT_TYPE my_prow, C_INT_TYPE my_pcol){ -+ int error_elpa; -+ elpa_set(*handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(*handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+} -+ -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE status; -+ int error_elpa; -+ int gpu, timings, debug; -+ char str[400]; -+ -+ elpa_t elpa_handle_1, elpa_handle_2, *elpa_handle_ptr; -+ -+ elpa_autotune_t autotune_handle; -+ C_INT_TYPE i, unfinished; -+ -+ C_INT_TYPE value; -+#ifdef WITH_MPI -+ MPI_Init(&argc, 
&argv); -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocs); -+ MPI_Comm_rank(MPI_COMM_WORLD, &myid); -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_real_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_real_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_complex_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_complex_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+ elpa_handle_1 = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ set_basic_parameters(&elpa_handle_1, na, nev, na_rows, na_cols, nblk, my_prow, my_pcol); -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(elpa_handle_1)); -+ -+ elpa_set(elpa_handle_1, "gpu", 0, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(elpa_handle_1, "timings", 1, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(elpa_handle_1, "debug", 1, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_store_settings(elpa_handle_1, "initial_parameters.txt", &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ // barrier after store settings, file created from one MPI rank only, but loaded everywhere -+ MPI_Barrier(MPI_COMM_WORLD); -+#endif -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_handle_2 = elpa_allocate(); -+#else -+ elpa_handle_2 = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ set_basic_parameters(&elpa_handle_2, na, nev, na_rows, na_cols, nblk, my_prow, my_pcol); -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(elpa_handle_2)); -+ -+ elpa_load_settings(elpa_handle_2, "initial_parameters.txt", &error_elpa); -+ -+ elpa_get(elpa_handle_2, "gpu", &gpu, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_get(elpa_handle_2, "timings", &timings, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_get(elpa_handle_2, "debug", &debug, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if ((timings != 1) || (debug != 1) || (gpu != 0)){ -+ printf("Parameters not stored or loaded correctly. Aborting... 
%d, %d, %d\n", timings, debug, gpu); -+ exit(1); -+ } -+ -+ elpa_handle_ptr = &elpa_handle_2; -+ -+ autotune_handle = elpa_autotune_setup(*elpa_handle_ptr, ELPA_AUTOTUNE_FAST, ELPA_AUTOTUNE_DOMAIN_REAL, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ /* mimic 20 scf steps */ -+ -+ for (i=0; i < 20; i++) { -+ -+ unfinished = elpa_autotune_step(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ if (unfinished == 0) { -+ if (myid == 0) { -+ printf("ELPA autotuning finished in the %d th scf step \n",i); -+ } -+ break; -+ } -+ -+ elpa_print_settings(*elpa_handle_ptr, &error_elpa); -+ elpa_autotune_print_state(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ sprintf(str, "saved_parameters_%d.txt", i); -+ elpa_store_settings(*elpa_handle_ptr, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* Solve EV problem */ -+ elpa_eigenvectors(*elpa_handle_ptr, a, ev, z, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* check the results */ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_real_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(double)); -+ -+#else -+ status = check_correctness_evp_numeric_residuals_real_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(float)); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_complex_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex double)); -+#else -+ status = check_correctness_evp_numeric_residuals_complex_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex float)); -+#endif -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ break; -+ } -+ -+ elpa_autotune_print_state(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ sprintf(str, "saved_state_%d.txt", i); -+ elpa_autotune_save_state(*elpa_handle_ptr, autotune_handle, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ //barrier after save state, file created from one MPI rank only, but loaded everywhere -+ MPI_Barrier(MPI_COMM_WORLD); -+#endif -+ -+ elpa_autotune_load_state(*elpa_handle_ptr, autotune_handle, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (unfinished == 1) { -+ if (myid == 0) { -+ printf("ELPA autotuning did not finished during %d scf cycles\n",i); -+ } -+ } -+ -+ } -+ elpa_autotune_set_best(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ if (myid == 0) { -+ printf("The best combination found by the autotuning:\n"); -+ elpa_autotune_print_best(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ } -+ -+ elpa_autotune_deallocate(autotune_handle, &error_elpa); -+ elpa_deallocate(elpa_handle_1, &error_elpa); -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_deallocate(elpa_handle_2); -+#else -+ elpa_deallocate(elpa_handle_2, &error_elpa); -+#endif -+ elpa_uninit(&error_elpa); -+ -+ if (myid == 0) { -+ printf("\n"); -+ printf("2stage ELPA real solver complete\n"); -+ printf("\n"); -+ } -+ -+ if (status ==0){ -+ if (myid ==0) { -+ printf("All ok!\n"); -+ } -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} 
-diff -ruN elpa-2020.05.001/examples/Fortran/assert.h elpa-2020.05.001_ok/examples/Fortran/assert.h ---- elpa-2020.05.001/examples/Fortran/assert.h 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/assert.h 2020-06-25 09:22:05.752530000 +0200 -@@ -0,0 +1,7 @@ -+#define stringify_(x) "x" -+#define stringify(x) stringify_(x) -+#define assert(x) call x_a(x, stringify(x), "F", __LINE__) -+ -+#define assert_elpa_ok(error_code) call x_ao(error_code, stringify(error_code), __FILE__, __LINE__) -+ -+! vim: syntax=fortran -diff -ruN elpa-2020.05.001/examples/Fortran/elpa2/complex_2stage_banded.F90 elpa-2020.05.001_ok/examples/Fortran/elpa2/complex_2stage_banded.F90 ---- elpa-2020.05.001/examples/Fortran/elpa2/complex_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/elpa2/complex_2stage_banded.F90 2020-06-25 09:22:05.794030000 +0200 -@@ -0,0 +1,295 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 complex case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The complex ELPA 2 kernel is set as the default kernel. 
-+!> However, this can be overriden by setting -+!> the environment variable "COMPLEX_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+program test_complex2_double_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - COMPLEX version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+#ifdef WITH_MPI -+ !TEST_INT_TYPE, external :: numroc -+#endif -+ complex(kind=ck8), parameter :: CZERO = (0.0_rk8,0.0_rk8), CONE = (1.0_rk8,0.0_rk8) -+ real(kind=rk8), allocatable :: ev(:) -+ -+ complex(kind=ck8), allocatable :: a(:,:), z(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ type(output_t) :: write_to_file -+ integer(kind=c_int) :: error_elpa -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ -+ -+ TEST_INT_TYPE :: numberOfDevices -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+ -+#define COMPLEXCASE -+#define DOUBLE_PRECISION_COMPLEX 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. 
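Editor's note: the comment above spells out the grid-selection heuristic shared by all of these example programs: start at the integer nearest sqrt(nprocs) and count downwards until a divisor is found, which gives the most nearly square np_rows x np_cols process grid. The Fortran loop that follows implements it; as a standalone C sketch of the same search (the helper name is illustrative only):

    #include <math.h>

    /* Pick an np_rows x np_cols grid as close to square as possible for
       nprocs ranks, mirroring the divisor search in the ELPA examples. */
    static void choose_process_grid(int nprocs, int *np_rows, int *np_cols)
    {
        int cols;
        for (cols = (int) sqrt((double) nprocs); cols > 1; cols--) {
            if (nprocs % cols == 0)
                break;
        }
        *np_cols = cols;            /* falls back to 1 if nprocs is prime */
        *np_rows = nprocs / cols;   /* e.g. 12 ranks -> 4 x 3, 7 ranks -> 7 x 1 */
    }
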
-+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - COMPLEX version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ ! Determine the necessary size of the distributed matrices, -+ ! we use the Scalapack tools routine NUMROC for that. -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0 -+ as(local_row, local_col) = 0 -+ end if -+ end do -+ end do -+ -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. 
ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.05.001/examples/Fortran/elpa2/double_instance.F90 elpa-2020.05.001_ok/examples/Fortran/elpa2/double_instance.F90 ---- elpa-2020.05.001/examples/Fortran/elpa2/double_instance.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/elpa2/double_instance.F90 2020-06-25 09:22:05.776144000 +0200 -@@ -0,0 +1,244 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! 
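Editor's note: the banded example that ends above, like its single-precision counterparts later in this patch, zeroes every element whose global row and column indices differ by more than the bandwidth, using index_l2g to map local block-cyclic indices to global ones before calling the solver with the "bandwidth" parameter set. Below is a 0-based C sketch of that mapping and of the zeroing loop, assuming the block-cyclic distribution starts on process 0 (the Fortran index_l2g used in these tests is the 1-based analogue):

    #include <stdlib.h>   /* abs */

    /* Global index of local entry l (0-based) on process iproc, for a
       block-cyclic distribution over nprocs processes with block size nb,
       assuming the first block is owned by process 0. */
    static int local_to_global(int l, int nb, int iproc, int nprocs)
    {
        return (l / nb) * nb * nprocs   /* complete block cycles before this one */
             + iproc * nb               /* offset of this process inside a cycle */
             + l % nb;                  /* position inside the block */
    }

    /* Zero the local panel outside the band, mirroring the loop in the banded
       tests (column-major local storage, leading dimension na_rows). */
    static void zero_outside_band(double *a, int na_rows, int na_cols, int nblk,
                                  int my_prow, int my_pcol,
                                  int np_rows, int np_cols, int bandwidth)
    {
        for (int lc = 0; lc < na_cols; lc++) {
            int gc = local_to_global(lc, nblk, my_pcol, np_cols);
            for (int lr = 0; lr < na_rows; lr++) {
                int gr = local_to_global(lr, nblk, my_prow, np_rows);
                if (abs(gr - gc) > bandwidth)
                    a[(size_t)lc * na_rows + lr] = 0.0;
            }
        }
    }
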
-+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "../assert.h" -+ -+program test_interface -+ use elpa -+ -+ use precision_for_tests -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ real(kind=C_DOUBLE), allocatable :: a1(:,:), as1(:,:) -+ ! eigenvectors -+ real(kind=C_DOUBLE), allocatable :: z1(:,:) -+ ! eigenvalues -+ real(kind=C_DOUBLE), allocatable :: ev1(:) -+ -+ ! The Matrix -+ complex(kind=C_DOUBLE_COMPLEX), allocatable :: a2(:,:), as2(:,:) -+ ! eigenvectors -+ complex(kind=C_DOUBLE_COMPLEX), allocatable :: z2(:,:) -+ ! eigenvalues -+ real(kind=C_DOUBLE), allocatable :: ev2(:) -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ TEST_INT_TYPE :: solver -+ TEST_INT_TYPE :: qr -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e1, e2 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+ -+ status = 0 -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ -+ np_rows = nprocs/np_cols -+ -+ my_prow = mod(myid, np_cols) -+ my_pcol = myid / np_cols -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a1 (na_rows,na_cols), as1(na_rows,na_cols)) -+ allocate(z1 (na_rows,na_cols)) -+ allocate(ev1(na)) -+ -+ a1(:,:) = 0.0 -+ z1(:,:) = 0.0 -+ ev1(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a1, z1, as1) -+ allocate(a2 (na_rows,na_cols), as2(na_rows,na_cols)) -+ allocate(z2 (na_rows,na_cols)) -+ allocate(ev2(na)) -+ -+ a2(:,:) = 0.0 -+ z2(:,:) = 0.0 -+ ev2(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a2, z2, as2) -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e1 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call 
e1%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ assert(e1%setup() .eq. ELPA_OK) -+ -+ call e1%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("real_kernel", ELPA_2STAGE_REAL_DEFAULT, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ e2 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e2%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e2%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ assert(e2%setup() .eq. ELPA_OK) -+ -+ call e2%set("solver", ELPA_SOLVER_1STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%eigenvectors(a1, ev1, z1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e2%eigenvectors(a2, ev2, z2, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e2, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as1, z1, ev1, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a1) -+ deallocate(as1) -+ deallocate(z1) -+ deallocate(ev1) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as2, z2, ev2, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a2) -+ deallocate(as2) -+ deallocate(z2) -+ deallocate(ev2) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+ -+ -+end program -diff -ruN elpa-2020.05.001/examples/Fortran/elpa2/real_2stage_banded.F90 elpa-2020.05.001_ok/examples/Fortran/elpa2/real_2stage_banded.F90 ---- elpa-2020.05.001/examples/Fortran/elpa2/real_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/elpa2/real_2stage_banded.F90 2020-06-25 09:22:05.800576000 +0200 -@@ -0,0 +1,294 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! 
http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The real ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "REAL_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+program test_real2_double_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! 
na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+ !TEST_INT_TYPE, external :: numroc -+ -+ real(kind=rk8), allocatable :: a(:,:), z(:,:), as(:,:), ev(:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ integer(kind=c_int) :: error_elpa -+ TEST_INT_TYPE :: numberOfDevices -+ type(output_t) :: write_to_file -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+#define DOUBLE_PRECISION_REAL 1 -+ -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+#define REALCASE -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - REAL version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! 
set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g(local_row, nblk, my_prow, np_rows) -+ do local_col = 1, na_cols -+ global_col = index_l2g(local_col, nblk, my_pcol, np_cols) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0.0 -+ as(local_row, local_col) = 0.0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.05.001/examples/Fortran/elpa2/single_complex_2stage_banded.F90 elpa-2020.05.001_ok/examples/Fortran/elpa2/single_complex_2stage_banded.F90 ---- elpa-2020.05.001/examples/Fortran/elpa2/single_complex_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/elpa2/single_complex_2stage_banded.F90 2020-06-25 09:22:05.782890000 +0200 -@@ -0,0 +1,295 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! 
http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 complex case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The complex ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "COMPLEX_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+program test_complex2_single_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - COMPLEX version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! 
nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+#ifdef WITH_MPI -+ !TEST_INT_TYPE, external :: numroc -+#endif -+ complex(kind=ck4), parameter :: CZERO = (0.0_rk4,0.0_rk4), CONE = (1.0_rk4,0.0_rk4) -+ real(kind=rk4), allocatable :: ev(:) -+ -+ complex(kind=ck4), allocatable :: a(:,:), z(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ type(output_t) :: write_to_file -+ integer(kind=ik) :: error_elpa -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ -+ -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+ -+#define COMPLEXCASE -+#define DOUBLE_PRECISION_COMPLEX 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - COMPLEX version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ ! Determine the necessary size of the distributed matrices, -+ ! we use the Scalapack tools routine NUMROC for that. -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ !------------------------------------------------------------------------------- -+ ! 
Allocate matrices and set up a test matrix for the eigenvalue problem -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0 -+ as(local_row, local_col) = 0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.05.001/examples/Fortran/elpa2/single_real_2stage_banded.F90 elpa-2020.05.001_ok/examples/Fortran/elpa2/single_real_2stage_banded.F90 ---- elpa-2020.05.001/examples/Fortran/elpa2/single_real_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/elpa2/single_real_2stage_banded.F90 2020-06-25 09:22:05.788644000 +0200 -@@ -0,0 +1,287 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. 
Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The real ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "REAL_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+program test_real2_single_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! 
-+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ use tests_scalapack_interfaces -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ real(kind=rk4), allocatable :: a(:,:), z(:,:), as(:,:), ev(:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ integer(kind=c_int) :: error_elpa -+ type(output_t) :: write_to_file -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+#define DOUBLE_PRECISION_REAL 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ -+ STATUS = 0 -+ -+#define REALCASE -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - REAL version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' 
-+ end if -+ -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0.0 -+ as(local_row, local_col) = 0.0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.05.001/examples/Fortran/elpa_print_headers.F90 elpa-2020.05.001_ok/examples/Fortran/elpa_print_headers.F90 ---- elpa-2020.05.001/examples/Fortran/elpa_print_headers.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/elpa_print_headers.F90 2020-06-25 09:22:05.748687000 +0200 -@@ -0,0 +1,273 @@ -+#if 0 -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! 
Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+! ELPA1 -- Faster replacements for ScaLAPACK symmetric eigenvalue routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+#endif -+ -+#ifdef WITH_OPENMP -+ if (myid .eq. 0) then -+ print *,"Threaded version of test program" -+ print *,"Using ",omp_get_max_threads()," threads" -+ print *," " -+ endif -+#endif -+ -+#ifndef WITH_MPI -+ if (myid .eq. 0) then -+ print *,"This version of ELPA does not support MPI parallelisation" -+ print *,"For MPI support re-build ELPA with appropiate flags" -+ print *," " -+ endif -+#endif -+ -+#ifdef ELPA1 -+ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION_REAL -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued double-precision version of ELPA1 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued single-precision version of ELPA1 is used" -+ print *," " -+ endif -+#endif -+ -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued double-precision version of ELPA1 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued single-precision version of ELPA1 is used" -+ print *," " -+ endif -+#endif -+ -+#endif /* DATATYPE */ -+ -+#else /* ELPA1 */ -+ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION_REAL -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued double-precision version of ELPA2 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued single-precision version of ELPA2 is used" -+ print *," " -+ endif -+#endif -+ -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued double-precision version of ELPA2 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 
0) then -+ print *," " -+ print *,"Complex valued single-precision version of ELPA2 is used" -+ print *," " -+ endif -+#endif -+ -+#endif /* DATATYPE */ -+ -+#endif /* ELPA1 */ -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+#ifdef HAVE_REDIRECT -+ if (check_redirect_environment_variable()) then -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Redirection of mpi processes is used" -+ print *," " -+ if (create_directories() .ne. 1) then -+ write(error_unit,*) "Unable to create directory for stdout and stderr!" -+ stop 1 -+ endif -+ endif -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call redirect_stdout(myid) -+ endif -+#endif -+ -+#ifndef ELPA1 -+ -+ if (myid .eq. 0) then -+ print *," " -+ print *,"This ELPA2 is build with" -+#ifdef WITH_GPU_KERNEL -+ print *,"GPU support" -+#endif -+ print *," " -+#ifdef REALCASE -+ -+#ifdef HAVE_AVX2 -+ -+#ifdef WITH_REAL_AVX_BLOCK2_KERNEL -+ print *,"AVX2 optimized kernel (2 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK4_KERNEL -+ print *,"AVX2 optimized kernel (4 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK6_KERNEL -+ print *,"AVX2 optimized kernel (6 blocking) for real matrices" -+#endif -+ -+#else /* no HAVE_AVX2 */ -+ -+#ifdef HAVE_AVX -+ -+#ifdef WITH_REAL_AVX_BLOCK2_KERNEL -+ print *,"AVX optimized kernel (2 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK4_KERNEL -+ print *,"AVX optimized kernel (4 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK6_KERNEL -+ print *,"AVX optimized kernel (6 blocking) for real matrices" -+#endif -+ -+#endif -+ -+#endif /* HAVE_AVX2 */ -+ -+ -+#ifdef WITH_REAL_GENERIC_KERNEL -+ print *,"GENERIC kernel for real matrices" -+#endif -+#ifdef WITH_REAL_GENERIC_SIMPLE_KERNEL -+ print *,"GENERIC SIMPLE kernel for real matrices" -+#endif -+#ifdef WITH_REAL_SSE_ASSEMBLY_KERNEL -+ print *,"SSE ASSEMBLER kernel for real matrices" -+#endif -+#ifdef WITH_REAL_BGP_KERNEL -+ print *,"BGP kernel for real matrices" -+#endif -+#ifdef WITH_REAL_BGQ_KERNEL -+ print *,"BGQ kernel for real matrices" -+#endif -+ -+#endif /* DATATYPE == REAL */ -+ -+#ifdef COMPLEXCASE -+ -+#ifdef HAVE_AVX2 -+ -+#ifdef WITH_COMPLEX_AVX_BLOCK2_KERNEL -+ print *,"AVX2 optimized kernel (2 blocking) for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_AVX_BLOCK1_KERNEL -+ print *,"AVX2 optimized kernel (1 blocking) for complex matrices" -+#endif -+ -+#else /* no HAVE_AVX2 */ -+ -+#ifdef HAVE_AVX -+ -+#ifdef WITH_COMPLEX_AVX_BLOCK2_KERNEL -+ print *,"AVX optimized kernel (2 blocking) for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_AVX_BLOCK1_KERNEL -+ print *,"AVX optimized kernel (1 blocking) for complex matrices" -+#endif -+ -+#endif -+ -+#endif /* HAVE_AVX2 */ -+ -+ -+#ifdef WITH_COMPLEX_GENERIC_KERNEL -+ print *,"GENERIC kernel for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_GENERIC_SIMPLE_KERNEL -+ print *,"GENERIC SIMPLE kernel for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_SSE_ASSEMBLY_KERNEL -+ print *,"SSE ASSEMBLER kernel for complex matrices" -+#endif -+ -+#endif /* DATATYPE == COMPLEX */ -+ -+ endif -+#endif /* ELPA1 */ -+ -+ if (write_to_file%eigenvectors) then -+ if (myid .eq. 0) print *,"Writing Eigenvectors to files" -+ endif -+ -+ if (write_to_file%eigenvalues) then -+ if (myid .eq. 
0) print *,"Writing Eigenvalues to files" -+ endif -+ -+ -diff -ruN elpa-2020.05.001/examples/Fortran/Makefile_examples_hybrid elpa-2020.05.001_ok/examples/Fortran/Makefile_examples_hybrid ---- elpa-2020.05.001/examples/Fortran/Makefile_examples_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/Makefile_examples_hybrid 2020-06-25 09:36:09.683320000 +0200 -@@ -0,0 +1,25 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_MODULES) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+LIBS = -L$(ELPA_LIB) -lelpatest_openmp -lelpa_openmp $(SCALAPACK_LIB) $(MKL) -+#CC = mpicc -O3 -+ -+all: test_real_1stage_omp test_real_2stage_all_kernels_omp test_autotune_omp test_multiple_objs_omp test_split_comm_omp -+ -+test_real_1stage_omp: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP)/elpa -o test_real_1stage_omp.F90 test.F90 -+ $(F90) -o $@ test_real_1stage_omp.F90 $(LIBS) -+ -+test_real_2stage_all_kernels_omp: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP)/elpa -o test_real_2stage_all_kernels_omp.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels_omp.F90 $(LIBS) -+ -+test_autotune_omp: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_autotune.F90 $(LIBS) -+ -+test_multiple_objs_omp: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm_omp: test_split_comm.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_split_comm.F90 $(LIBS) -diff -ruN elpa-2020.05.001/examples/Fortran/Makefile_examples_pure elpa-2020.05.001_ok/examples/Fortran/Makefile_examples_pure ---- elpa-2020.05.001/examples/Fortran/Makefile_examples_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/Makefile_examples_pure 2020-06-25 09:36:09.684371000 +0200 -@@ -0,0 +1,25 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+#CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs test_split_comm -+ -+test_real_1stage: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_1stage.F90 test.F90 -+ $(F90) -o $@ test_real_1stage.F90 $(LIBS) -+ -+test_real_2stage_all_kernels: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_2stage_all_kernels.F90 test.F90 -+ $(F90) -o $@ 
test_real_2stage_all_kernels.F90 $(LIBS) -+ -+test_autotune: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_autotune.F90 $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm: test_split_comm.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_split_comm.F90 $(LIBS) -diff -ruN elpa-2020.05.001/examples/Fortran/Makefile_examples_pure_cuda elpa-2020.05.001_ok/examples/Fortran/Makefile_examples_pure_cuda ---- elpa-2020.05.001/examples/Fortran/Makefile_examples_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/Makefile_examples_pure_cuda 2020-06-25 09:37:52.094001000 +0200 -@@ -0,0 +1,25 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcudart -+#CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs test_split_comm -+ -+test_real_1stage: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_1stage.F90 test.F90 -+ $(F90) -o $@ test_real_1stage.F90 $(LIBS) -+ -+test_real_2stage_all_kernels: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_2stage_all_kernels.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels.F90 $(LIBS) -+ -+test_autotune: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_autotune.F90 $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm: test_split_comm.F90 -+ $(F90) -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_split_comm.F90 $(LIBS) -diff -ruN elpa-2020.05.001/examples/Fortran/test_autotune.F90 elpa-2020.05.001_ok/examples/Fortran/test_autotune.F90 ---- elpa-2020.05.001/examples/Fortran/test_autotune.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/test_autotune.F90 2020-06-25 09:22:05.814320000 +0200 -@@ -0,0 +1,312 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. 
Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! 
blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+ class(elpa_autotune_t), pointer :: tune_state -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ as(:,:) = a(:,:) -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (layout .eq. 
'C') then -+ call e%set("matrix_order",COLUMN_MAJOR_ORDER,error_elpa) -+ else -+ call e%set("matrix_order",ROW_MAJOR_ORDER,error_elpa) -+ endif -+ -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ call e%set("timings",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("debug",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("gpu", 0, error_elpa) -+ assert_elpa_ok(error_elpa) -+ !call e%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e%setup()) -+ -+ if (myid == 0) print *, "" -+ -+ tune_state => e%autotune_setup(ELPA_AUTOTUNE_FAST, AUTOTUNE_DOMAIN, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ iter=0 -+ do while (e%autotune_step(tune_state, error_elpa)) -+ assert_elpa_ok(error_elpa) -+ iter=iter+1 -+ write(iter_string,'(I5.5)') iter -+ !call e%print_settings() -+ !call e%store_settings("saved_parameters_"//trim(iter_string)//".txt") -+ call e%timer_start("eigenvectors: iteration "//trim(iter_string)) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("eigenvectors: iteration "//trim(iter_string)) -+ -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e%print_times("eigenvectors: iteration "//trim(iter_string)) -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ a(:,:) = as(:,:) -+ !call e%autotune_print_state(tune_state) -+ !call e%autotune_save_state(tune_state, "saved_state_"//trim(iter_string)//".txt") -+ end do -+ -+ ! set and print the autotuned-settings -+ call e%autotune_set_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "The best combination found by the autotuning:" -+ flush(output_unit) -+ call e%autotune_print_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ endif -+ ! de-allocate autotune object -+ call elpa_autotune_deallocate(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "Running once more time with the best found setting..." -+ endif -+ call e%timer_start("eigenvectors: best setting") -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("eigenvectors: best setting") -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e%print_times("eigenvectors: best setting") -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ -+ call elpa_deallocate(e,error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+end program -diff -ruN elpa-2020.05.001/examples/Fortran/test.F90 elpa-2020.05.001_ok/examples/Fortran/test.F90 ---- elpa-2020.05.001/examples/Fortran/test.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/test.F90 2020-06-25 09:22:05.807148000 +0200 -@@ -0,0 +1,915 @@ -+! This file is part of ELPA. -+! -+! 
The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! 
Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE) ^ defined(TEST_SCALAPACK_ALL) ^ defined(TEST_SCALAPACK_PART)) -+error: define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE or TEST_SCALAPACK_ALL or TEST_SCALAPACK_PART -+#endif -+ -+#ifdef TEST_SOLVER_1STAGE -+#ifdef TEST_ALL_KERNELS -+error: TEST_ALL_KERNELS cannot be defined for TEST_SOLVER_1STAGE -+#endif -+#ifdef TEST_KERNEL -+error: TEST_KERNEL cannot be defined for TEST_SOLVER_1STAGE -+#endif -+#endif -+ -+#ifdef TEST_SOLVER_2STAGE -+#if !(defined(TEST_KERNEL) ^ defined(TEST_ALL_KERNELS)) -+error: define either TEST_ALL_KERNELS or a valid TEST_KERNEL -+#endif -+#endif -+ -+#ifdef TEST_GENERALIZED_DECOMP_EIGENPROBLEM -+#define TEST_GENERALIZED_EIGENPROBLEM -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+#ifdef TEST_REAL -+#define KERNEL_KEY "real_kernel" -+#endif -+#ifdef TEST_COMPLEX -+#define KERNEL_KEY "complex_kernel" -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+#ifdef WITH_SCALAPACK_TESTS -+ use test_scalapack -+#endif -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+#ifdef WITH_OPENMP -+ use omp_lib -+#endif -+ use precision_for_tests -+ -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_MPI_TYPE :: myidMPI, nprocsMPI -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ MATRIX_TYPE, allocatable :: b(:,:), c(:,:) -+#endif -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ MATRIX_TYPE, allocatable :: b(:,:), bs(:,:) -+#endif -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ logical :: check_all_evals, skip_check_correctness -+ -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ EV_TYPE, allocatable :: d(:), sd(:), ds(:), sds(:) -+ EV_TYPE :: diagonalELement, subdiagonalElement -+#endif -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+#ifdef TEST_ALL_KERNELS -+ TEST_INT_TYPE :: i -+#endif -+#ifdef TEST_ALL_LAYOUTS -+ character(len=1), parameter :: layouts(2) = [ 'C', 'R' ] -+ TEST_INT_TYPE :: i_layout -+#endif -+ integer(kind=c_int):: kernel -+ character(len=1) :: layout -+ logical :: do_test_numeric_residual, do_test_numeric_residual_generalized, & -+ do_test_analytic_eigenvalues, & -+ do_test_analytic_eigenvalues_eigenvectors, & -+ do_test_frank_eigenvalues, & -+ do_test_toeplitz_eigenvalues, do_test_cholesky, & -+ do_test_hermitian_multiply -+ -+#ifdef WITH_OPENMP -+ TEST_INT_TYPE :: max_threads, threads_caller -+#endif -+ -+#ifdef SPLIT_COMM_MYSELF -+ TEST_INT_MPI_TYPE :: mpi_comm_rows, mpi_comm_cols, mpi_string_length, mpierr2 -+ character(len=MPI_MAX_ERROR_STRING) :: mpierr_string -+#endif -+ -+ call read_input_parameters_traditional(na, nev, nblk, write_to_file, skip_check_correctness) -+ call setup_mpi(myid, nprocs) -+ -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ check_all_evals = .true. -+ -+ -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .false. -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ -+ do_test_cholesky = .false. -+#if defined(TEST_CHOLESKY) -+ do_test_cholesky = .true. -+#endif -+ do_test_hermitian_multiply = .false. -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ do_test_hermitian_multiply = .true. -+#endif -+ -+ status = 0 -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Program ' // TEST_CASE -+ print *, "" -+ endif -+ -+#ifdef TEST_ALL_LAYOUTS -+ do i_layout = 1, size(layouts) ! layouts -+ layout = layouts(i_layout) -+ do np_cols = 1, nprocs ! factors -+ if (mod(nprocs,np_cols) /= 0 ) then -+ cycle -+ endif -+#else -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+#endif -+ -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+#if TEST_QR_DECOMPOSITION == 1 -+ -+#if TEST_GPU == 1 -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+#endif /* TEST_GPU */ -+ if (nblk .lt. 64) then -+ if (myid .eq. 0) then -+ print *,"At the moment QR decomposition need blocksize of at least 64" -+ endif -+ if ((na .lt. 64) .and. (myid .eq. 
0)) then -+ print *,"This is why the matrix size must also be at least 64 or only 1 MPI task can be used" -+ endif -+ -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+ endif -+#endif /* TEST_QR_DECOMPOSITION */ -+ -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, & -+ np_cols, layout, my_blacs_ctxt, my_prow, & -+ my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+ allocate(b (na_rows,na_cols)) -+ allocate(c (na_rows,na_cols)) -+#endif -+ -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ allocate(b (na_rows,na_cols)) -+ allocate(bs (na_rows,na_cols)) -+#endif -+ -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ allocate(d (na), ds(na)) -+ allocate(sd (na), sds(na)) -+#endif -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+#if defined(TEST_MATRIX_RANDOM) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) && !defined(TEST_EIGENVALUES) -+ ! the random matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! RANDOM + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! RANDOM + TEST_CHOLESKY: wee need SPD matrix -+ ! RANDOM + TEST_EIGENVALUES: no correctness check known -+ -+ ! We also have to take care of special case in TEST_EIGENVECTORS -+#if !defined(TEST_EIGENVECTORS) -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+#else /* TEST_EIGENVECTORS */ -+ if (nev .ge. 1) then -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_numeric_residual = .true. -+#endif -+ else -+ if (myid .eq. 0) then -+ print *,"At the moment with the random matrix you need nev >=1" -+ endif -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+ endif -+#endif /* TEST_EIGENVECTORS */ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* (TEST_MATRIX_RANDOM) */ -+ -+#if defined(TEST_MATRIX_RANDOM) && defined(TEST_CHOLESKY) -+ call prepare_matrix_random_spd(na, myid, sc_desc, a, z, as, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* TEST_MATRIX_RANDOM and TEST_CHOLESKY */ -+ -+#if defined(TEST_MATRIX_RANDOM) && defined(TEST_GENERALIZED_EIGENPROBLEM) -+ ! call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ call prepare_matrix_random_spd(na, myid, sc_desc, b, z, bs, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .true. 
-+#endif /* TEST_MATRIX_RANDOM and TEST_GENERALIZED_EIGENPROBLEM */ -+ -+#if defined(TEST_MATRIX_RANDOM) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_EIGENVALUES)) -+#error "Random matrix is not allowed in this configuration" -+#endif -+ -+#if defined(TEST_MATRIX_ANALYTIC) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) -+ ! the analytic matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! ANALYTIC + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! ANALTIC + TEST_CHOLESKY: no correctness check yet implemented -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ as(:,:) = a -+ -+ do_test_numeric_residual = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_analytic_eigenvalues = .true. -+#endif -+#if defined(TEST_EIGENVECTORS) -+ if (nev .ge. 1) then -+ do_test_analytic_eigenvalues_eigenvectors = .true. -+ do_test_analytic_eigenvalues = .false. -+ else -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ endif -+#endif -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* TEST_MATRIX_ANALYTIC */ -+#if defined(TEST_MATRIX_ANALYTIC) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_CHOLESKY)) -+#error "Analytic matrix is not allowd in this configuration" -+#endif -+ -+#if defined(TEST_MATRIX_TOEPLITZ) -+ ! The Toeplitz matrix works in each test -+#ifdef TEST_SINGLE -+ diagonalElement = 0.45_c_float -+ subdiagonalElement = 0.78_c_float -+#else -+ diagonalElement = 0.45_c_double -+ subdiagonalElement = 0.78_c_double -+#endif -+ -+! actually we test cholesky for diagonal matrix only -+#if defined(TEST_CHOLESKY) -+#ifdef TEST_SINGLE -+ diagonalElement = (2.546_c_float, 0.0_c_float) -+ subdiagonalElement = (0.0_c_float, 0.0_c_float) -+#else -+ diagonalElement = (2.546_c_double, 0.0_c_double) -+ subdiagonalElement = (0.0_c_double, 0.0_c_double) -+#endif -+#endif /* TEST_CHOLESKY */ -+ -+ call prepare_matrix_toeplitz(na, diagonalElement, subdiagonalElement, & -+ d, sd, ds, sds, a, as, nblk, np_rows, & -+ np_cols, my_prow, my_pcol) -+ -+ -+ do_test_numeric_residual = .false. -+#if defined(TEST_EIGENVECTORS) -+ if (nev .ge. 1) then -+ do_test_numeric_residual = .true. -+ else -+ do_test_numeric_residual = .false. -+ endif -+#endif -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+#if defined(TEST_CHOLESKY) -+ do_test_toeplitz_eigenvalues = .false. -+#else -+ do_test_toeplitz_eigenvalues = .true. -+#endif -+ -+#endif /* TEST_MATRIX_TOEPLITZ */ -+ -+ -+#if defined(TEST_MATRIX_FRANK) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) -+ ! the random matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! FRANK + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! FRANK + TEST_CHOLESKY: no correctness check yet implemented -+ -+ ! We also have to take care of special case in TEST_EIGENVECTORS -+#if !defined(TEST_EIGENVECTORS) -+ call prepare_matrix_frank(na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. 
-+ -+#else /* TEST_EIGENVECTORS */ -+ -+ if (nev .ge. 1) then -+ call prepare_matrix_frank(na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ else -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ -+ endif -+ -+#endif /* TEST_EIGENVECTORS */ -+#endif /* (TEST_MATRIX_FRANK) */ -+#if defined(TEST_MATRIX_FRANK) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_CHOLESKY)) -+#error "FRANK matrix is not allowed in this configuration" -+#endif -+ -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+#ifdef TEST_REAL -+ -+#ifdef TEST_DOUBLE -+ b(:,:) = 2.0_c_double * a(:,:) -+ c(:,:) = 0.0_c_double -+#else -+ b(:,:) = 2.0_c_float * a(:,:) -+ c(:,:) = 0.0_c_float -+#endif -+ -+#endif /* TEST_REAL */ -+ -+#ifdef TEST_COMPLEX -+ -+#ifdef TEST_DOUBLE -+ b(:,:) = 2.0_c_double * a(:,:) -+ c(:,:) = (0.0_c_double, 0.0_c_double) -+#else -+ b(:,:) = 2.0_c_float * a(:,:) -+ c(:,:) = (0.0_c_float, 0.0_c_float) -+#endif -+ -+#endif /* TEST_COMPLEX */ -+ -+#endif /* TEST_HERMITIAN_MULTIPLY */ -+ -+! if the test is used for (repeated) performacne tests, one might want to skip the checking -+! of the results, which might be time-consuming and not necessary. -+ if(skip_check_correctness) then -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .false. -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_cholesky = .false. -+ endif -+ -+ -+#ifdef WITH_OPENMP -+ threads_caller = omp_get_max_threads() -+ if (myid == 0) then -+ print *,"The calling program uses ",threads_caller," threads" -+ endif -+#endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (layout .eq. 'C') then -+ call e%set("matrix_order",COLUMN_MAJOR_ORDER,error_elpa) -+ else -+ call e%set("matrix_order",ROW_MAJOR_ORDER,error_elpa) -+ endif -+ -+#ifdef WITH_MPI -+#ifdef SPLIT_COMM_MYSELF -+ call mpi_comm_split(MPI_COMM_WORLD, int(my_pcol,kind=MPI_KIND), int(my_prow,kind=MPI_KIND), & -+ mpi_comm_rows, mpierr) -+ if (mpierr .ne. MPI_SUCCESS) then -+ call MPI_ERROR_STRING(mpierr, mpierr_string, mpi_string_length, mpierr2) -+ write(error_unit,*) "MPI ERROR occured during mpi_comm_split for row communicator: ", trim(mpierr_string) -+ stop 1 -+ endif -+ -+ call mpi_comm_split(MPI_COMM_WORLD, int(my_prow,kind=MPI_KIND), int(my_pcol,kind=MPI_KIND), & -+ mpi_comm_cols, mpierr) -+ if (mpierr .ne. 
MPI_SUCCESS) then -+ call MPI_ERROR_STRING(mpierr,mpierr_string, mpi_string_length, mpierr2) -+ write(error_unit,*) "MPI ERROR occured during mpi_comm_split for col communicator: ", trim(mpierr_string) -+ stop 1 -+ endif -+ -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("mpi_comm_rows", int(mpi_comm_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("mpi_comm_cols", int(mpi_comm_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#else -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ call e%set("blacs_context", int(my_blacs_ctxt,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ call e%set("timings", 1_ik, error_elpa) -+ assert_elpa_ok(e%setup()) -+ -+#ifdef TEST_SOLVER_1STAGE -+ call e%set("solver", ELPA_SOLVER_1STAGE, error_elpa) -+#else -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+#endif -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("gpu", TEST_GPU, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%set("qr", 1_ik, error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+#ifdef WITH_OPENMP -+ max_threads=omp_get_max_threads() -+ call e%set("omp_threads", int(max_threads,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ if (myid == 0) print *, "" -+ -+#ifdef TEST_ALL_KERNELS -+ do i = 0, elpa_option_cardinality(KERNEL_KEY) ! kernels -+ if (TEST_GPU .eq. 0) then -+ kernel = elpa_option_enumerate(KERNEL_KEY, int(i,kind=c_int)) -+ if (kernel .eq. ELPA_2STAGE_REAL_GPU) continue -+ if (kernel .eq. ELPA_2STAGE_COMPLEX_GPU) continue -+ endif -+#endif -+#ifdef TEST_KERNEL -+ kernel = TEST_KERNEL -+#endif -+ -+#ifdef TEST_SOLVER_2STAGE -+#if TEST_GPU == 1 -+#if defined TEST_REAL -+ kernel = ELPA_2STAGE_REAL_GPU -+#endif -+#if defined TEST_COMPLEX -+ kernel = ELPA_2STAGE_COMPLEX_GPU -+#endif -+#endif -+ call e%set(KERNEL_KEY, kernel, error_elpa) -+#ifdef TEST_KERNEL -+ assert_elpa_ok(error_elpa) -+#else -+ if (error_elpa /= ELPA_OK) then -+ cycle -+ endif -+ ! actually used kernel might be different if forced via environment variables -+ call e%get(KERNEL_KEY, kernel, error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ if (myid == 0) then -+ print *, elpa_int_value_to_string(KERNEL_KEY, kernel) // " kernel" -+ endif -+#endif -+ -+ -+! print all parameters -+ call e%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef TEST_ALL_KERNELS -+ call e%timer_start(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#endif -+ -+ ! The actual solve step -+#if defined(TEST_EIGENVECTORS) -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%timer_start("e%eigenvectors_qr()") -+#else -+ call e%timer_start("e%eigenvectors()") -+#endif -+#ifdef TEST_SCALAPACK_ALL -+ call solve_scalapack_all(na, a, sc_desc, ev, z) -+#elif TEST_SCALAPACK_PART -+ call solve_scalapack_part(na, a, sc_desc, nev, ev, z) -+ check_all_evals = .false. ! 
scalapack does not compute all eigenvectors -+#else -+ call e%eigenvectors(a, ev, z, error_elpa) -+#endif -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%timer_stop("e%eigenvectors_qr()") -+#else -+ call e%timer_stop("e%eigenvectors()") -+#endif -+#endif /* TEST_EIGENVECTORS */ -+ -+#ifdef TEST_EIGENVALUES -+ call e%timer_start("e%eigenvalues()") -+ call e%eigenvalues(a, ev, error_elpa) -+ call e%timer_stop("e%eigenvalues()") -+#endif -+ -+#if defined(TEST_SOLVE_TRIDIAGONAL) -+ call e%timer_start("e%solve_tridiagonal()") -+ call e%solve_tridiagonal(d, sd, z, error_elpa) -+ call e%timer_stop("e%solve_tridiagonal()") -+ ev(:) = d(:) -+#endif -+ -+#if defined(TEST_CHOLESKY) -+ call e%timer_start("e%cholesky()") -+ call e%cholesky(a, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("e%cholesky()") -+#endif -+ -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ call e%timer_start("e%hermitian_multiply()") -+ call e%hermitian_multiply('F','F', int(na,kind=c_int), a, b, int(na_rows,kind=c_int), & -+ int(na_cols,kind=c_int), c, int(na_rows,kind=c_int), & -+ int(na_cols,kind=c_int), error_elpa) -+ call e%timer_stop("e%hermitian_multiply()") -+#endif -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ call e%timer_start("e%generalized_eigenvectors()") -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ call e%timer_start("is_already_decomposed=.false.") -+#endif -+ call e%generalized_eigenvectors(a, b, ev, z, .false., error_elpa) -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ call e%timer_stop("is_already_decomposed=.false.") -+ a = as -+ call e%timer_start("is_already_decomposed=.true.") -+ call e%generalized_eigenvectors(a, b, ev, z, .true., error_elpa) -+ call e%timer_stop("is_already_decomposed=.true.") -+#endif -+ call e%timer_stop("e%generalized_eigenvectors()") -+#endif -+ -+ assert_elpa_ok(error_elpa) -+ -+#ifdef TEST_ALL_KERNELS -+ call e%timer_stop(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#endif -+ -+ if (myid .eq. 0) then -+#ifdef TEST_ALL_KERNELS -+ call e%print_times(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#else /* TEST_ALL_KERNELS */ -+ -+#if defined(TEST_EIGENVECTORS) -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%print_times("e%eigenvectors_qr()") -+#else -+ call e%print_times("e%eigenvectors()") -+#endif -+#endif -+#ifdef TEST_EIGENVALUES -+ call e%print_times("e%eigenvalues()") -+#endif -+#ifdef TEST_SOLVE_TRIDIAGONAL -+ call e%print_times("e%solve_tridiagonal()") -+#endif -+#ifdef TEST_CHOLESKY -+ call e%print_times("e%cholesky()") -+#endif -+#ifdef TEST_HERMITIAN_MULTIPLY -+ call e%print_times("e%hermitian_multiply()") -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ call e%print_times("e%generalized_eigenvectors()") -+#endif -+#endif /* TEST_ALL_KERNELS */ -+ endif -+ -+ if (do_test_analytic_eigenvalues) then -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, & -+ my_prow, my_pcol, check_all_evals, .false.) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_analytic_eigenvalues_eigenvectors) then -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, & -+ my_prow, my_pcol, check_all_evals, .true.) 
-+ call check_status(status, myid) -+ endif -+ -+ if(do_test_numeric_residual) then -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, & -+ np_rows,np_cols, my_prow, my_pcol) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_frank_eigenvalues) then -+ status = check_correctness_eigenvalues_frank(na, ev, z, myid) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_toeplitz_eigenvalues) then -+#if defined(TEST_EIGENVALUES) || defined(TEST_SOLVE_TRIDIAGONAL) -+ status = check_correctness_eigenvalues_toeplitz(na, diagonalElement, & -+ subdiagonalElement, ev, z, myid) -+ call check_status(status, myid) -+#endif -+ endif -+ -+ if (do_test_cholesky) then -+ status = check_correctness_cholesky(na, a, as, na_rows, sc_desc, myid ) -+ call check_status(status, myid) -+ endif -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+ if (do_test_hermitian_multiply) then -+ status = check_correctness_hermitian_multiply(na, a, b, c, na_rows, sc_desc, myid ) -+ call check_status(status, myid) -+ endif -+#endif -+ -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ if(do_test_numeric_residual_generalized) then -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, & -+ np_cols, my_prow, & -+ my_pcol, bs) -+ call check_status(status, myid) -+ endif -+#endif -+ -+ -+#ifdef WITH_OPENMP -+ if (threads_caller .ne. omp_get_max_threads()) then -+ if (myid .eq. 0) then -+ print *, " ERROR! the number of OpenMP threads has not been restored correctly" -+ endif -+ status = 1 -+ endif -+#endif -+ if (myid == 0) then -+ print *, "" -+ endif -+ -+#ifdef TEST_ALL_KERNELS -+ a(:,:) = as(:,:) -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ d = ds -+ sd = sds -+#endif -+ end do ! kernels -+#endif -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+#ifdef TEST_HERMITIAN_MULTIPLY -+ deallocate(b) -+ deallocate(c) -+#endif -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ deallocate(d, ds) -+ deallocate(sd, sds) -+#endif -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ deallocate(b, bs) -+#endif -+ -+#ifdef TEST_ALL_LAYOUTS -+ end do ! factors -+ end do ! layouts -+#endif -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ -+ contains -+ -+ subroutine check_status(status, myid) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: status, myid -+ TEST_INT_MPI_TYPE :: mpierr -+ if (status /= 0) then -+ if (myid == 0) print *, "Result incorrect!" -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.05.001/examples/Fortran/test_multiple_objs.F90 elpa-2020.05.001_ok/examples/Fortran/test_multiple_objs.F90 ---- elpa-2020.05.001/examples/Fortran/test_multiple_objs.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/test_multiple_objs.F90 2020-06-25 09:22:05.742700000 +0200 -@@ -0,0 +1,379 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! 
- Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+ -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! 
number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_TYPE :: ierr -+ TEST_INT_MPI_TYPE :: mpierr -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e1, e2, e_ptr -+ class(elpa_autotune_t), pointer :: tune_state -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ TEST_INT_TYPE :: timings, debug, gpu -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ as(:,:) = a(:,:) -+ -+ e1 => elpa_allocate(error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+ call set_basic_params(e1, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e1%set("timings",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("debug",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("gpu", 0, error_elpa) -+ assert_elpa_ok(error_elpa) -+ !call e1%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e1%setup()) -+ -+ call e1%store_settings("initial_parameters.txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ ! barrier after store settings, file created from one MPI rank only, but loaded everywhere -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ -+ ! try to load parameters into another object -+ e2 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call set_basic_params(e2, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ call e2%load_settings("initial_parameters.txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert_elpa_ok(e2%setup()) -+ -+ ! 
test whether the user setting of e1 are correctly loade to e2 -+ call e2%get("timings", int(timings,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%get("debug", int(debug,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%get("gpu", int(gpu,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if ((timings .ne. 1) .or. (debug .ne. 1) .or. (gpu .ne. 0)) then -+ print *, "Parameters not stored or loaded correctly. Aborting...", timings, debug, gpu -+ stop 1 -+ endif -+ -+ if(myid == 0) print *, "parameters of e1" -+ call e1%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if(myid == 0) print *, "" -+ if(myid == 0) print *, "parameters of e2" -+ call e2%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ e_ptr => e2 -+ -+ -+ tune_state => e_ptr%autotune_setup(ELPA_AUTOTUNE_FAST, AUTOTUNE_DOMAIN, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ iter=0 -+ do while (e_ptr%autotune_step(tune_state, error_elpa)) -+ assert_elpa_ok(error_elpa) -+ -+ iter=iter+1 -+ write(iter_string,'(I5.5)') iter -+ call e_ptr%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%store_settings("saved_parameters_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%timer_start("eigenvectors: iteration "//trim(iter_string)) -+ call e_ptr%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e_ptr%timer_stop("eigenvectors: iteration "//trim(iter_string)) -+ -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e_ptr%print_times("eigenvectors: iteration "//trim(iter_string)) -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ a(:,:) = as(:,:) -+ call e_ptr%autotune_print_state(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%autotune_save_state(tune_state, "saved_state_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ ! barrier after save state, file created from one MPI rank only, but loaded everywhere -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call e_ptr%autotune_load_state(tune_state, "saved_state_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ end do -+ -+ ! set and print the autotuned-settings -+ call e_ptr%autotune_set_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "The best combination found by the autotuning:" -+ flush(output_unit) -+ call e_ptr%autotune_print_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ endif -+ ! de-allocate autotune object -+ call elpa_autotune_deallocate(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "Running once more time with the best found setting..." -+ endif -+ call e_ptr%timer_start("eigenvectors: best setting") -+ call e_ptr%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%timer_stop("eigenvectors: best setting") -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e_ptr%print_times("eigenvectors: best setting") -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) 
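The test programs in this patch all pick their BLACS process grid with the same idiom: count down from `NINT(SQRT(REAL(nprocs)))` until a divisor of `nprocs` is found, so `np_rows * np_cols` always equals `nprocs` and the grid is as close to square as the rank count allows. A minimal standalone sketch of just that loop follows; it assumes nothing beyond a Fortran compiler (no MPI), and the program name and the 1..16 rank range are illustrative only.

```fortran
! Minimal sketch of the grid-selection idiom used by the test programs:
! walk down from nint(sqrt(nprocs)) until a divisor of nprocs is found.
program grid_factorization_demo
  implicit none
  integer :: nprocs, np_cols, np_rows

  do nprocs = 1, 16
    do np_cols = nint(sqrt(real(nprocs))), 2, -1
      if (mod(nprocs, np_cols) == 0) exit
    end do
    ! after a fall-through the loop variable is left at 1,
    ! so the division below is always exact
    np_rows = nprocs / np_cols
    print '(3(a,i0))', 'nprocs=', nprocs, '  np_rows=', np_rows, '  np_cols=', np_cols
  end do
end program grid_factorization_demo
```

For prime rank counts the inner loop falls through to `np_cols = 1`, giving an `nprocs x 1` grid, which is why the test programs assert `nprocs == np_rows * np_cols` afterwards rather than checking for squareness.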
-+ -+ call elpa_deallocate(e_ptr, error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol -+ -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call elpa%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.05.001/examples/Fortran/test_skewsymmetric.F90 elpa-2020.05.001_ok/examples/Fortran/test_skewsymmetric.F90 ---- elpa-2020.05.001/examples/Fortran/test_skewsymmetric.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/test_skewsymmetric.F90 2020-06-25 09:22:05.758265000 +0200 -@@ -0,0 +1,400 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! 
-+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# define EV_TYPE_COMPLEX complex(kind=C_FLOAT_COMPLEX) -+# define MATRIX_TYPE_COMPLEX complex(kind=C_FLOAT_COMPLEX) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define MATRIX_TYPE_COMPLEX complex(kind=C_DOUBLE_COMPLEX) -+# define EV_TYPE_COMPLEX complex(kind=C_DOUBLE_COMPLEX) -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use precision_for_tests -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a_skewsymmetric(:,:), as_skewsymmetric(:,:) -+ MATRIX_TYPE_COMPLEX, allocatable :: a_complex(:,:), as_complex(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z_skewsymmetric(:,:) -+ MATRIX_TYPE_COMPLEX, allocatable :: z_complex(:,:) -+ ! eigenvalues -+ EV_TYPE, allocatable :: ev_skewsymmetric(:), ev_complex(:) -+ -+ TEST_INT_TYPE :: status, i, j -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e_complex, e_skewsymmetric -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+! 
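test_skewsymmetric.F90 checks the dedicated skew-symmetric solver against a "brute force" run that feeds `a_complex = i * a_skewsymmetric` to the ordinary complex eigensolver (see the construction loops further below). The short standalone sketch here only verifies the linear-algebra fact that makes that comparison legitimate, namely that `i*A` is Hermitian whenever `A` is real skew-symmetric; the matrix size `n = 4` and the random fill are illustrative assumptions, and no MPI or ELPA is needed.

```fortran
! Standalone check: for a real skew-symmetric A, the matrix i*A is Hermitian,
! so it can be handed to an ordinary complex (Hermitian) eigensolver.
program ia_hermitian_check
  implicit none
  integer, parameter :: dp = kind(1.0d0)
  integer, parameter :: n = 4          ! illustrative size
  real(dp)    :: a(n, n), r(n, n)
  complex(dp) :: ia(n, n)

  call random_number(r)
  a  = r - transpose(r)                      ! real skew-symmetric: A^T = -A
  ia = cmplx(0.0_dp, 1.0_dp, dp) * a         ! i*A

  ! Hermitian means i*A equals its conjugate transpose
  if (maxval(abs(ia - conjg(transpose(ia)))) < 1.0e-12_dp) then
    print *, 'i*A is Hermitian, as expected'
  else
    print *, 'unexpected: i*A is not Hermitian'
  end if
end program ia_hermitian_check
```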
-+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, & -+ np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a_skewsymmetric (na_rows,na_cols)) -+ allocate(as_skewsymmetric(na_rows,na_cols)) -+ allocate(z_skewsymmetric (na_rows,2*na_cols)) -+ allocate(ev_skewsymmetric(na)) -+ -+ a_skewsymmetric(:,:) = 0.0 -+ z_skewsymmetric(:,:) = 0.0 -+ ev_skewsymmetric(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a_skewsymmetric, & -+ z_skewsymmetric(:,1:na_cols), as_skewsymmetric, is_skewsymmetric=1) -+ -+ !call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ as_skewsymmetric(:,:) = a_skewsymmetric(:,:) -+ -+ -+ ! prepare the complex matrix for the "brute force" case -+ allocate(a_complex (na_rows,na_cols)) -+ allocate(as_complex(na_rows,na_cols)) -+ allocate(z_complex (na_rows,na_cols)) -+ allocate(ev_complex(na)) -+ -+ a_complex(1:na_rows,1:na_cols) = 0.0 -+ z_complex(1:na_rows,1:na_cols) = 0.0 -+ as_complex(1:na_rows,1:na_cols) = 0.0 -+ -+ -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef TEST_DOUBLE -+ a_complex(i,j) = dcmplx(0.0, a_skewsymmetric(i,j)) -+#endif -+#ifdef TEST_SINGLE -+ a_complex(i,j) = cmplx(0.0, a_skewsymmetric(i,j)) -+#endif -+ enddo -+ enddo -+ -+ -+ -+ z_complex(1:na_rows,1:na_cols) = a_complex(1:na_rows,1:na_cols) -+ as_complex(1:na_rows,1:na_cols) = a_complex(1:na_rows,1:na_cols) -+ -+ ! first set up and solve the brute force problem -+ e_complex => elpa_allocate(error_elpa) -+ call set_basic_params(e_complex, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e_complex%set("timings",1, error_elpa) -+ -+ call e_complex%set("debug",1,error_elpa) -+ call e_complex%set("gpu", 0,error_elpa) -+ call e_complex%set("omp_threads", 8, error_elpa) -+ -+ assert_elpa_ok(e_complex%setup()) -+ call e_complex%set("solver", elpa_solver_2stage, error_elpa) -+ -+ call e_complex%timer_start("eigenvectors: brute force as complex matrix") -+ call e_complex%eigenvectors(a_complex, ev_complex, z_complex, error_elpa) -+ call e_complex%timer_stop("eigenvectors: brute force as complex matrix") -+ -+ if (myid .eq. 0) then -+ print *, "" -+ call e_complex%print_times("eigenvectors: brute force as complex matrix") -+ endif -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+! as_complex(:,:) = z_complex(:,:) -+#ifdef TEST_SINGLE -+ status = check_correctness_evp_numeric_residuals_complex_single(na, nev, as_complex, z_complex, ev_complex, sc_desc, & -+ nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#else -+ status = check_correctness_evp_numeric_residuals_complex_double(na, nev, as_complex, z_complex, ev_complex, sc_desc, & -+ nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#endif -+ status = 0 -+ call check_status(status, myid) -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ ! 
now run the skewsymmetric case -+ e_skewsymmetric => elpa_allocate(error_elpa) -+ call set_basic_params(e_skewsymmetric, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e_skewsymmetric%set("timings",1, error_elpa) -+ -+ call e_skewsymmetric%set("debug",1,error_elpa) -+ call e_skewsymmetric%set("gpu", 0,error_elpa) -+ call e_skewsymmetric%set("omp_threads",8, error_elpa) -+ -+ assert_elpa_ok(e_skewsymmetric%setup()) -+ -+ call e_skewsymmetric%set("solver", elpa_solver_2stage, error_elpa) -+ -+ call e_skewsymmetric%timer_start("eigenvectors: skewsymmetric ") -+ call e_skewsymmetric%skew_eigenvectors(a_skewsymmetric, ev_skewsymmetric, z_skewsymmetric, error_elpa) -+ call e_skewsymmetric%timer_stop("eigenvectors: skewsymmetric ") -+ -+ if (myid .eq. 0) then -+ print *, "" -+ call e_skewsymmetric%print_times("eigenvectors: skewsymmetric") -+ endif -+ -+ ! check eigenvalues -+ do i=1, na -+ if (myid == 0) then -+#ifdef TEST_DOUBLE -+ if (abs(ev_complex(i)-ev_skewsymmetric(i))/abs(ev_complex(i)) .gt. 1e-10) then -+#endif -+#ifdef TEST_SINGLE -+ if (abs(ev_complex(i)-ev_skewsymmetric(i))/abs(ev_complex(i)) .gt. 1e-4) then -+#endif -+ print *,"ev: i=",i,ev_complex(i),ev_skewsymmetric(i) -+ status = 1 -+ endif -+ endif -+ enddo -+ -+ -+! call check_status(status, myid) -+ -+ z_complex(:,:) = 0 -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef TEST_DOUBLE -+ z_complex(i,j) = dcmplx(z_skewsymmetric(i,j), z_skewsymmetric(i,na_cols+j)) -+#endif -+#ifdef TEST_SINGLE -+ z_complex(i,j) = cmplx(z_skewsymmetric(i,j), z_skewsymmetric(i,na_cols+j)) -+#endif -+ enddo -+ enddo -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ -+#ifdef TEST_SINGLE -+ status = check_correctness_evp_numeric_residuals_ss_real_single(na, nev, as_skewsymmetric, z_complex, ev_skewsymmetric, & -+ sc_desc, nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#else -+ status = check_correctness_evp_numeric_residuals_ss_real_double(na, nev, as_skewsymmetric, z_complex, ev_skewsymmetric, & -+ sc_desc, nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#endif -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call elpa_deallocate(e_complex,error_elpa) -+ call elpa_deallocate(e_skewsymmetric,error_elpa) -+ -+ -+ !to do -+ ! - check whether brute-force check_correctness_evp_numeric_residuals worsk (complex ev) -+ ! 
- invent a test for skewsymmetric residuals -+ -+ deallocate(a_complex) -+ deallocate(as_complex) -+ deallocate(z_complex) -+ deallocate(ev_complex) -+ -+ deallocate(a_skewsymmetric) -+ deallocate(as_skewsymmetric) -+ deallocate(z_skewsymmetric) -+ deallocate(ev_skewsymmetric) -+ call elpa_uninit(error_elpa) -+ -+ -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol -+ -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call elpa%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ subroutine check_status(status, myid) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: status, myid -+ TEST_INT_MPI_TYPE :: mpierr -+ if (status /= 0) then -+ if (myid == 0) print *, "Result incorrect!" -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ endif -+ end subroutine -+end program -diff -ruN elpa-2020.05.001/examples/Fortran/test_split_comm.F90 elpa-2020.05.001_ok/examples/Fortran/test_split_comm.F90 ---- elpa-2020.05.001/examples/Fortran/test_split_comm.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Fortran/test_split_comm.F90 2020-06-25 09:22:05.765048000 +0200 -@@ -0,0 +1,340 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! 
ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ TEST_INT_TYPE :: num_groups, group_size, color, key -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr, ierr,mpi_sub_commMPI, myidMPI, nprocsMPI, colorMPI, keyMPI, & -+ myid_subMPI, nprocs_subMPI -+ TEST_INT_TYPE :: mpi_sub_comm -+ TEST_INT_TYPE :: myid_sub, nprocs_sub -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ -+ status = 0 -+#ifdef WITH_MPI -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !call setup_mpi(myid, nprocs) -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world, myidMPI,mpierr) -+ call mpi_comm_size(mpi_comm_world, nprocsMPI,mpierr) -+ myid = int(myidMPI,kind=BLAS_KIND) -+ nprocs = int(nprocsMPI,kind=BLAS_KIND) -+ -+ if((mod(nprocs, 4) == 0) .and. (nprocs > 4)) then -+ num_groups = 4 -+ else if(mod(nprocs, 3) == 0) then -+ num_groups = 3 -+ else if(mod(nprocs, 2) == 0) then -+ num_groups = 2 -+ else -+ num_groups = 1 -+ endif -+ -+ group_size = nprocs / num_groups -+ -+ if(num_groups * group_size .ne. nprocs) then -+ print *, "Something went wrong before splitting the communicator" -+ stop 1 -+ else -+ if(myid == 0) then -+ print '((a,i0,a,i0))', "The test will split the global communicator into ", num_groups, " groups of size ", group_size -+ endif -+ endif -+ -+ ! each group of processors will have the same color -+ color = mod(myid, num_groups) -+ ! this will determine the myid in each group -+ key = myid/num_groups -+ !split the communicator -+ colorMPI=int(color,kind=MPI_KIND) -+ keyMPI = int(key, kind=MPI_KIND) -+ call mpi_comm_split(mpi_comm_world, colorMPI, keyMPI, mpi_sub_commMPI, mpierr) -+ mpi_sub_comm = int(mpi_sub_commMPI,kind=BLAS_KIND) -+ color = int(colorMPI,kind=BLAS_KIND) -+ key = int(keyMPI,kind=BLAS_KIND) -+ if(mpierr .ne. MPI_SUCCESS) then -+ print *, "communicator splitting not successfull", mpierr -+ stop 1 -+ endif -+ -+ call mpi_comm_rank(mpi_sub_commMPI, myid_subMPI, mpierr) -+ call mpi_comm_size(mpi_sub_commMPI, nprocs_subMPI, mpierr) -+ myid_sub = int(myid_subMPI,kind=BLAS_KIND) -+ nprocs_sub = int(nprocs_subMPI,kind=BLAS_KIND) -+ -+ !print *, "glob ", myid, nprocs, ", loc ", myid_sub, nprocs_sub, ", color ", color, ", key ", key -+ -+ if((mpierr .ne. MPI_SUCCESS) .or. (nprocs_sub .ne. group_size) .or. (myid_sub >= group_size)) then -+ print *, "something wrong with the sub communicators" -+ stop 1 -+ endif -+ -+ -+#ifdef HAVE_REDIRECT -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs_sub))),2,-1 -+ if(mod(nprocs_sub,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs_sub/np_cols -+ assert(nprocs_sub == np_rows * np_cols) -+ assert(nprocs == np_rows * np_cols * num_groups) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+ print '(a)', 'Process layout: ' // layout -+ print *,'' -+ endif -+ if (myid_sub == 0) then -+ print '(4(a,i0))','GROUP ', color, ': Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs_sub -+ endif -+ -+ ! 
USING the subcommunicator -+ call set_up_blacsgrid(int(mpi_sub_comm,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ !call prepare_matrix_analytic(na, a, nblk, myid_sub, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ call prepare_matrix_random(na, myid_sub, sc_desc, a, z, as) -+ as(:,:) = a(:,:) -+ -+ e => elpa_allocate(error_elpa) -+ call set_basic_params(e, na, nev, na_rows, na_cols, mpi_sub_comm, my_prow, my_pcol) -+ -+ call e%set("timings",1, error_elpa) -+ -+ call e%set("debug",1, error_elpa) -+ call e%set("gpu", 0, error_elpa) -+ !call e%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e%setup()) -+ -+ -+ -+! if(myid == 0) print *, "parameters of e" -+! call e%print_all_parameters() -+! if(myid == 0) print *, "" -+ -+ -+ call e%timer_start("eigenvectors") -+ call e%eigenvectors(a, ev, z, error_elpa) -+ call e%timer_stop("eigenvectors") -+ -+ assert_elpa_ok(error_elpa) -+ -+ !status = check_correctness_analytic(na, nev, ev, z, nblk, myid_sub, np_rows, np_cols, my_prow, my_pcol, & -+ ! .true., .true., print_times=.false.) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid_sub, & -+ np_rows,np_cols, my_prow, my_pcol) -+ if (status /= 0) & -+ print *, "processor ", myid, ": Result incorrect for processor group ", color -+ -+ if (myid .eq. 0) then -+ print *, "Showing times of one goup only" -+ call e%print_times("eigenvectors") -+ endif -+ -+ call elpa_deallocate(e, error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+#endif -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, communicator, my_prow, my_pcol) -+ use iso_c_binding -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol, communicator -+ -+#ifdef WITH_MPI -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa%set("mpi_comm_parent", int(communicator,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.05.001/examples/Makefile_hybrid elpa-2020.05.001_ok/examples/Makefile_hybrid ---- elpa-2020.05.001/examples/Makefile_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Makefile_hybrid 2020-06-25 10:51:55.761200000 +0200 -@@ -0,0 +1,14 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core 
-liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+LIBS = -L$(ELPA_LIB_OPENMP) -lelpa_openmp -lelpatest_openmp -lelpa $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -qopenmp -+ -+all: test_real_e1_omp test_real_e2_omp -+ -+test_real_e1_omp: test_real_e1.F90 -+ $(F90) -DWITH_OPENMP -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2_omp: test_real_e2.F90 -+ $(F90) -DWITH_OPENMP -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.05.001/examples/Makefile_pure elpa-2020.05.001_ok/examples/Makefile_pure ---- elpa-2020.05.001/examples/Makefile_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Makefile_pure 2020-06-25 10:51:55.762301000 +0200 -@@ -0,0 +1,14 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_e1 test_real_e2 -+ -+test_real_e1: test_real_e1.F90 -+ $(F90) -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2: test_real_e2.F90 -+ $(F90) -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.05.001/examples/Makefile_pure_cuda elpa-2020.05.001_ok/examples/Makefile_pure_cuda ---- elpa-2020.05.001/examples/Makefile_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/Makefile_pure_cuda 2020-06-25 10:51:55.763203000 +0200 -@@ -0,0 +1,14 @@ -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LAPACK_LIB = -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcudart -+CC = mpicc -O3 -+ -+all: test_real_e1 test_real_e2 -+ -+test_real_e1: test_real_e1.F90 -+ $(F90) -DCUDA -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2: test_real_e2.F90 -+ $(F90) -DCUDA -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.05.001/examples/test_real2.F90 elpa-2020.05.001_ok/examples/test_real2.F90 ---- elpa-2020.05.001/examples/test_real2.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/test_real2.F90 2020-06-25 10:46:46.651243000 +0200 -@@ -0,0 +1,237 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! 
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (4000, 1500, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+program test_real_example -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ -+ use iso_c_binding -+ -+ use elpa -+ -+#ifdef HAVE_MPI_MODULE -+ use mpi -+ implicit none -+#else -+ implicit none -+ include 'mpif.h' -+#endif -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ integer :: nblk -+ integer :: na, nev -+ -+ integer :: np_rows, np_cols, na_rows, na_cols -+ -+ integer :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ integer :: i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ integer, external :: numroc -+ -+ real(kind=c_double), allocatable :: a(:,:), z(:,:), ev(:) -+ -+ integer :: iseed(4096) ! Random seed, size should be sufficient for every generator -+ -+ integer :: STATUS -+ integer :: success -+ character(len=8) :: task_suffix -+ integer :: j -+ -+ integer, parameter :: error_units = 0 -+ -+ class(elpa_t), pointer :: e -+ !------------------------------------------------------------------------------- -+ -+ -+ ! default parameters -+ na = 1000 -+ nev = 500 -+ nblk = 16 -+ -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world,myid,mpierr) -+ call mpi_comm_size(mpi_comm_world,nprocs,mpierr) -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! 
at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ ! initialise BLACS -+ my_blacs_ctxt = mpi_comm_world -+ call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+ na_rows = numroc(na, nblk, my_prow, 0, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0, np_cols) -+ -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0, 0, my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_units,*) 'Error in BLACS descinit! info=',info -+ write(error_units,*) 'Most likely this happend since you want to use' -+ write(error_units,*) 'more MPI tasks than are possible for your' -+ write(error_units,*) 'problem size (matrix size and blocksize)!' -+ write(error_units,*) 'The blacsgrid can not be set up properly' -+ write(error_units,*) 'Try reducing the number of MPI tasks...' -+ call MPI_ABORT(mpi_comm_world, 1, mpierr) -+ endif -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ iseed(:) = myid -+ call RANDOM_SEED(put=iseed) -+ call RANDOM_NUMBER(z) -+ -+ a(:,:) = z(:,:) -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ call pdtran(na, na, 1.d0, z, 1, 1, sc_desc, 1.d0, a, 1, 1, sc_desc) ! A = A + Z**T -+ -+ !------------------------------------------------------------------------------- -+ -+ if (elpa_init(20171201) /= elpa_ok) then -+ print *, "ELPA API version not supported" -+ stop -+ endif -+ e => elpa_allocate() -+ -+ ! set parameters decribing the matrix and it's MPI distribution -+ call e%set("na", na, success) -+ call e%set("nev", nev, success) -+ call e%set("local_nrows", na_rows, success) -+ call e%set("local_ncols", na_cols, success) -+ call e%set("nblk", nblk, success) -+ call e%set("mpi_comm_parent", mpi_comm_world, success) -+ call e%set("process_row", my_prow, success) -+ call e%set("process_col", my_pcol, success) -+ -+ success = e%setup() -+ -+ call e%set("solver", elpa_solver_2stage, success) -+ -+ -+ ! Calculate eigenvalues/eigenvectors -+ -+ if (myid==0) then -+ print '(a)','| Entering two-step ELPA solver ... ' -+ print * -+ end if -+ -+ call mpi_barrier(mpi_comm_world, mpierr) ! for correct timings only -+ call e%eigenvectors(a, ev, z, success) -+ -+ if (myid==0) then -+ print '(a)','| Two-step ELPA solver complete.' -+ print * -+ end if -+ -+ call elpa_deallocate(e) -+ call elpa_uninit() -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+end -+ -diff -ruN elpa-2020.05.001/examples/test_real.F90 elpa-2020.05.001_ok/examples/test_real.F90 ---- elpa-2020.05.001/examples/test_real.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.05.001_ok/examples/test_real.F90 2020-06-25 10:46:26.636394000 +0200 -@@ -0,0 +1,237 @@ -+! This file is part of ELPA. -+! -+! 
The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 1 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (4000, 1500, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+program test_real_example -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ -+ use iso_c_binding -+ -+ use elpa -+ -+#ifdef HAVE_MPI_MODULE -+ use mpi -+ implicit none -+#else -+ implicit none -+ include 'mpif.h' -+#endif -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! 
nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ integer :: nblk -+ integer :: na, nev -+ -+ integer :: np_rows, np_cols, na_rows, na_cols -+ -+ integer :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ integer :: i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ integer, external :: numroc -+ -+ real(kind=c_double), allocatable :: a(:,:), z(:,:), ev(:) -+ -+ integer :: iseed(4096) ! Random seed, size should be sufficient for every generator -+ -+ integer :: STATUS -+ integer :: success -+ character(len=8) :: task_suffix -+ integer :: j -+ -+ integer, parameter :: error_units = 0 -+ -+ class(elpa_t), pointer :: e -+ !------------------------------------------------------------------------------- -+ -+ -+ ! default parameters -+ na = 1000 -+ nev = 500 -+ nblk = 16 -+ -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world,myid,mpierr) -+ call mpi_comm_size(mpi_comm_world,nprocs,mpierr) -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ ! initialise BLACS -+ my_blacs_ctxt = mpi_comm_world -+ call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+ na_rows = numroc(na, nblk, my_prow, 0, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0, np_cols) -+ -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0, 0, my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_units,*) 'Error in BLACS descinit! info=',info -+ write(error_units,*) 'Most likely this happend since you want to use' -+ write(error_units,*) 'more MPI tasks than are possible for your' -+ write(error_units,*) 'problem size (matrix size and blocksize)!' -+ write(error_units,*) 'The blacsgrid can not be set up properly' -+ write(error_units,*) 'Try reducing the number of MPI tasks...' -+ call MPI_ABORT(mpi_comm_world, 1, mpierr) -+ endif -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ iseed(:) = myid -+ call RANDOM_SEED(put=iseed) -+ call RANDOM_NUMBER(z) -+ -+ a(:,:) = z(:,:) -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ call pdtran(na, na, 1.d0, z, 1, 1, sc_desc, 1.d0, a, 1, 1, sc_desc) ! A = A + Z**T -+ -+ !------------------------------------------------------------------------------- -+ -+ if (elpa_init(20171201) /= elpa_ok) then -+ print *, "ELPA API version not supported" -+ stop -+ endif -+ e => elpa_allocate() -+ -+ ! 
set parameters decribing the matrix and it's MPI distribution -+ call e%set("na", na, success) -+ call e%set("nev", nev, success) -+ call e%set("local_nrows", na_rows, success) -+ call e%set("local_ncols", na_cols, success) -+ call e%set("nblk", nblk, success) -+ call e%set("mpi_comm_parent", mpi_comm_world, success) -+ call e%set("process_row", my_prow, success) -+ call e%set("process_col", my_pcol, success) -+ -+ success = e%setup() -+ -+ call e%set("solver", elpa_solver_1stage, success) -+ -+ -+ ! Calculate eigenvalues/eigenvectors -+ -+ if (myid==0) then -+ print '(a)','| Entering one-step ELPA solver ... ' -+ print * -+ end if -+ -+ call mpi_barrier(mpi_comm_world, mpierr) ! for correct timings only -+ call e%eigenvectors(a, ev, z, success) -+ -+ if (myid==0) then -+ print '(a)','| One-step ELPA solver complete.' -+ print * -+ end if -+ -+ call elpa_deallocate(e) -+ call elpa_uninit() -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+end -+ diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gomkl-2021-gpu.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-gomkl-2021-gpu.eb deleted file mode 100644 index 4648866e6368894e20feb8841b783ccba30a525b..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gomkl-2021-gpu.eb +++ /dev/null @@ -1,107 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_gf_lp64 -lmkl_sequential -lmkl_core --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_70" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gomkl-2021.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-gomkl-2021.eb deleted file mode 100644 index 95b13a868884a3dd8ef057983b988feb4de1aa72..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gomkl-2021.eb +++ /dev/null @@ -1,94 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. 
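Since the legacy interface is gone, any code built against this module has to go through the object-based ELPA API that the programs in $EBROOTELPA/examples use. For orientation, the whole call sequence condenses to the minimal C sketch below, modelled on the shipped test.c: the BLACS grid setup, the block-cyclic distribution parameters (na_rows, na_cols, my_prow, my_pcol) and the result checks are assumed to be handled by the caller, and error handling is reduced to asserts.

/*
 * Minimal sketch of the object-based ELPA solve sequence (the only API
 * available since 2019.11.001).  Grid setup, matrix filling and checks are
 * left to the caller; see the bundled examples for a complete program.
 */
#include <assert.h>
#include <elpa/elpa.h>

#define assert_elpa_ok(x) assert((x) == ELPA_OK)

/* a, z: local blocks of the distributed matrix and of the eigenvectors,
 * ev: global eigenvalue array of length na,
 * mpi_comm_parent: Fortran communicator handle, i.e. MPI_Comm_c2f(MPI_COMM_WORLD),
 * layout parameters as produced by the usual BLACS/numroc setup. */
int solve_with_elpa(int na, int nev, int nblk,
                    int na_rows, int na_cols,
                    int my_prow, int my_pcol, int mpi_comm_parent,
                    double *a, double *ev, double *z)
{
    int error_elpa;
    elpa_t handle;

    /* 20190524 is the API version the shipped Makefiles compile against */
    if (elpa_init(20190524) != ELPA_OK)
        return 1;

    handle = elpa_allocate(&error_elpa);
    assert_elpa_ok(error_elpa);

    /* describe the matrix and its block-cyclic MPI distribution */
    elpa_set(handle, "na", na, &error_elpa);                assert_elpa_ok(error_elpa);
    elpa_set(handle, "nev", nev, &error_elpa);              assert_elpa_ok(error_elpa);
    elpa_set(handle, "local_nrows", na_rows, &error_elpa);  assert_elpa_ok(error_elpa);
    elpa_set(handle, "local_ncols", na_cols, &error_elpa);  assert_elpa_ok(error_elpa);
    elpa_set(handle, "nblk", nblk, &error_elpa);            assert_elpa_ok(error_elpa);
    elpa_set(handle, "mpi_comm_parent", mpi_comm_parent, &error_elpa); assert_elpa_ok(error_elpa);
    elpa_set(handle, "process_row", my_prow, &error_elpa);  assert_elpa_ok(error_elpa);
    elpa_set(handle, "process_col", my_pcol, &error_elpa);  assert_elpa_ok(error_elpa);

    assert_elpa_ok(elpa_setup(handle));

    /* compute the first nev eigenpairs; ev receives the eigenvalues,
     * z the corresponding eigenvectors */
    elpa_eigenvectors(handle, a, ev, z, &error_elpa);
    assert_elpa_ok(error_elpa);

    elpa_deallocate(handle, &error_elpa);
    elpa_uninit(&error_elpa);
    return 0;
}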
-""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_gf_lp64 -lmkl_sequential[-lmkl_gnu_thread] --lmkl_core -lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gpsmkl-2021-gpu.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-gpsmkl-2021-gpu.eb deleted file mode 100644 index b946e509098082b360a22b40121f14cb82bf81a4..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gpsmkl-2021-gpu.eb +++ /dev/null @@ -1,106 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. 
With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 --lmkl_gf_lp64 -lmkl_sequential -lmkl_core --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_70" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gpsmkl-2021.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-gpsmkl-2021.eb deleted file mode 100644 index caae290801be91602f08529f4e9ff5622fb2cb36..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-gpsmkl-2021.eb +++ /dev/null @@ -1,93 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers 
for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_gf_lp64 --lmkl_sequential[-lmkl_gnu_thread] --lmkl_core -lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-2021-gpu.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-2021-gpu.eb deleted file mode 100644 index 
9683ebdd638b01c8fa4386eef3c46c3fb3722d2a..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-2021-gpu.eb +++ /dev/null @@ -1,107 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 --lmkl_intel_lp64 -lmkl_sequential -lmkl_core --liomp5 -lpthread -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), - ('Python', '3.8.5'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_70" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 
'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-2021.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-2021.eb deleted file mode 100644 index 315c7ecda72f0753321a18af8c2db8b5c76dfdf5..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-2021.eb +++ /dev/null @@ -1,93 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_intel_lp64 --lmkl_sequential[-lmkl_intel_thread] --lmkl_core -liomp5 -lpthread -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-para-2021-gpu.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-para-2021-gpu.eb deleted file mode 100644 index ea2adb9ae85a397ac37127265ff9e21d2613f5f2..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-para-2021-gpu.eb +++ /dev/null @@ -1,105 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. 
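For the GPU variants, building with --enable-gpu is not enough on its own: the calling program typically also has to switch the GPU path on at run time, through the same key/value interface used for the matrix layout. A minimal sketch follows; the "gpu" key is the one the shipped test_autotune.c sets (there with value 0 to stay on the CPU), while the value 1 and the ELPA_SOLVER_2STAGE constant are assumptions based on the elpa_solver_2stage selection made in the Fortran examples.

/*
 * Sketch: requesting GPU execution through the ELPA object API.
 * Call after elpa_setup(handle) and before elpa_eigenvectors(), matching
 * the order used in the bundled test programs.
 */
#include <assert.h>
#include <elpa/elpa.h>

#define assert_elpa_ok(x) assert((x) == ELPA_OK)

void request_gpu_2stage(elpa_t handle)
{
    int error_elpa;

    /* 1 = use the CUDA kernels of the -gpu build (assumed; the tests use 0) */
    elpa_set(handle, "gpu", 1, &error_elpa);
    assert_elpa_ok(error_elpa);

    /* two-stage solver, as in the Fortran example's elpa_solver_2stage */
    elpa_set(handle, "solver", ELPA_SOLVER_2STAGE, &error_elpa);
    assert_elpa_ok(error_elpa);
}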
-""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 --lmkl_intel_lp64 -lmkl_sequential -lmkl_core --liomp5 -lpthread -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_70" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-para-2021.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-para-2021.eb deleted file mode 100644 index 6ae1a590c091809e29a6c690661047a09dd2db72..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-intel-para-2021.eb +++ /dev/null @@ -1,93 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. 
They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_intel_lp64 --lmkl_sequential[-lmkl_intel_thread] --lmkl_core -liomp5 -lpthread -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-iomkl-2021-gpu.eb b/Golden_Repo/e/ELPA/ELPA-2020.11.001-iomkl-2021-gpu.eb deleted file mode 100644 index 7e4da4bcea831c7d81017da28191d4dc61e51900..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-iomkl-2021-gpu.eb +++ /dev/null @@ -1,107 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. 
ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_intel_lp64 -lmkl_sequential -lmkl_core --liomp5 -lpthread -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_70" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001-iomkl-2021.eb 
b/Golden_Repo/e/ELPA/ELPA-2020.11.001-iomkl-2021.eb deleted file mode 100644 index d66eade6ea3bd892c11faa3a28d49b9b526e7dc3..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001-iomkl-2021.eb +++ /dev/null @@ -1,94 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_intel_lp64 -lmkl_sequential[-lmkl_intel_thread] --lmkl_core -liomp -lpthread -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', 
- 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Golden_Repo/e/ELPA/ELPA-2020.11.001_install-libelpatest.patch b/Golden_Repo/e/ELPA/ELPA-2020.11.001_install-libelpatest.patch deleted file mode 100644 index f4b825b158b418f264adf625455bff32ed7cc245..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ELPA/ELPA-2020.11.001_install-libelpatest.patch +++ /dev/null @@ -1,10535 +0,0 @@ ---- elpa-2020.11.001/Makefile.am 2020-12-22 08:42:55.000000000 +0100 -+++ elpa-2020.11.001_ok/Makefile.am 2021-02-02 12:59:31.635462357 +0100 -@@ -628,7 +628,7 @@ - test_program_fcflags = $(AM_FCFLAGS) $(FC_MODOUT)test_modules $(FC_MODINC)test_modules $(FC_MODINC)modules $(FC_MODINC)private_modules - - # library with shared sources for the test files --noinst_LTLIBRARIES += libelpatest@SUFFIX@.la -+lib_LTLIBRARIES += libelpatest@SUFFIX@.la - libelpatest@SUFFIX@_la_FCFLAGS = $(test_program_fcflags) - libelpatest@SUFFIX@_la_SOURCES = \ - test/shared/tests_variable_definitions.F90 \ -diff -ruN elpa-2020.11.001/examples/C/Makefile_examples_hybrid elpa-2020.11.001_ok/examples/C/Makefile_examples_hybrid ---- elpa-2020.11.001/examples/C/Makefile_examples_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/Makefile_examples_hybrid 2021-07-02 10:32:19.117127000 +0200 -@@ -0,0 +1,31 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+# GCC -+# F90 = mpif90 -O3 -fopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB_OPENMP) -lelpa_openmp -lelpatest_openmp $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -qopenmp -+# GCC -+# CC = mpicc -O3 -fopenmp -+ -+all: test_real_1stage_hybrid test_real_2stage_all_kernels_hybrid test_autotune_hybrid test_multiple_objs_hybrid -+ -+test_real_1stage_hybrid: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DWITH_OPENMP -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels_hybrid: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DWITH_OPENMP -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_autotune_hybrid: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs_hybrid: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 
-DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.11.001/examples/C/Makefile_examples_pure elpa-2020.11.001_ok/examples/C/Makefile_examples_pure ---- elpa-2020.11.001/examples/C/Makefile_examples_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/Makefile_examples_pure 2021-07-02 10:37:44.708232000 +0200 -@@ -0,0 +1,27 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs -+ -+test_real_1stage: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_autotune: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.11.001/examples/C/Makefile_examples_pure_cuda elpa-2020.11.001_ok/examples/C/Makefile_examples_pure_cuda ---- elpa-2020.11.001/examples/C/Makefile_examples_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/Makefile_examples_pure_cuda 2021-07-02 10:40:47.722615000 +0200 -@@ -0,0 +1,27 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcublas -lcudart -+CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs -+ -+test_real_1stage: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE) 
-I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_autotune: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.11.001/examples/C/test_autotune.c elpa-2020.11.001_ok/examples/C/test_autotune.c ---- elpa-2020.11.001/examples/C/test_autotune.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/test_autotune.c 2021-07-02 10:41:06.048699058 +0200 -@@ -0,0 +1,335 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. 
-+*/ -+ -+#include "config.h" -+ -+#include <string.h> -+#include <stdio.h> -+#include <stdlib.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+//#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+//#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+//#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# else -+# define MATRIX_TYPE complex float -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# else -+# define MATRIX_TYPE complex double -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+ -+#ifdef HAVE_64BIT_INTEGER_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#include "generated.h" -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE status; -+ int error_elpa; -+ elpa_t handle; -+ -+ elpa_autotune_t autotune_handle; -+ C_INT_TYPE i, unfinished; -+ -+ C_INT_TYPE value; -+#ifdef WITH_MPI -+ MPI_Init(&argc, &argv); -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocs); -+ MPI_Comm_rank(MPI_COMM_WORLD, &myid); -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_real_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_real_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_complex_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_complex_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version 
not supported"); -+ exit(1); -+ } -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ handle = elpa_allocate(); -+#else -+ handle = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ /* Set parameters */ -+ elpa_set(handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (myid == 0) { -+ printf("Setting the matrix parameters na=%d, nev=%d \n",na,nev); -+ } -+ elpa_set(handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(handle)); -+ -+ elpa_set(handle, "gpu", 0, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ autotune_handle = elpa_autotune_setup(handle, ELPA_AUTOTUNE_FAST, ELPA_AUTOTUNE_DOMAIN_REAL, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ /* mimic 20 scf steps */ -+ -+ for (i=0; i < 20; i++) { -+ -+ unfinished = elpa_autotune_step(handle, autotune_handle, &error_elpa); -+ -+ if (unfinished == 0) { -+ if (myid == 0) { -+ printf("ELPA autotuning finished in the %d th scf step \n",i); -+ } -+ break; -+ } -+ if (myid == 0) { -+ printf("The current setting of the ELPA object: \n"); -+ elpa_print_settings(handle, &error_elpa); -+ -+ printf("The state of the autotuning: \n"); -+ elpa_autotune_print_state(handle, autotune_handle, &error_elpa); -+ } -+ -+ -+ /* Solve EV problem */ -+ elpa_eigenvectors(handle, a, ev, z, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* check the results */ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_real_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(double)); -+ -+#else -+ status = check_correctness_evp_numeric_residuals_real_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(float)); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_complex_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex double)); -+#else -+ status = check_correctness_evp_numeric_residuals_complex_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex float)); -+#endif -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ break; -+ } -+ printf("hier %d \n",myid); -+ } -+ -+ if (unfinished == 1) { -+ if (myid == 0) { -+ printf("ELPA autotuning did not finished during %d scf cycles\n",i); -+ -+ } -+ -+ } -+ elpa_autotune_set_best(handle, autotune_handle, &error_elpa); -+ -+ if (myid == 0) { -+ printf("The best combination found by the autotuning:\n"); -+ elpa_autotune_print_best(handle, autotune_handle, &error_elpa); -+ 
} -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_autotune_deallocate(autotune_handle); -+ elpa_deallocate(handle); -+#else -+ elpa_autotune_deallocate(autotune_handle, &error_elpa); -+ elpa_deallocate(handle, &error_elpa); -+#endif -+ elpa_uninit(&error_elpa); -+ -+ if (myid == 0) { -+ printf("\n"); -+ printf("2stage ELPA real solver complete\n"); -+ printf("\n"); -+ } -+ -+ if (status ==0){ -+ if (myid ==0) { -+ printf("All ok!\n"); -+ } -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} -diff -ruN elpa-2020.11.001/examples/C/test.c elpa-2020.11.001_ok/examples/C/test.c ---- elpa-2020.11.001/examples/C/test.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/test.c 2021-07-02 10:41:14.785601626 +0200 -@@ -0,0 +1,339 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. 
-+*/ -+ -+#include "config.h" -+ -+#include <stdio.h> -+#include <stdlib.h> -+#include <string.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_GENERALIZED_DECOMP_EIGENPROBLEM -+#define TEST_GENERALIZED_EIGENPROBLEM -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_real_single_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_real_single_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_real_single_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_real_single_f -+# else -+# define MATRIX_TYPE complex float -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_complex_single_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_complex_single_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_complex_single_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_complex_single_f -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_real_double_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_real_double_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_real_double_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_real_double_f -+# else -+# define MATRIX_TYPE complex double -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_complex_double_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_complex_double_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_complex_double_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_complex_double_f -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#define C_INT_MPI_TYPE long int -+#else -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#define C_INT_MPI_TYPE int -+#endif -+#include "generated.h" -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_MPI_TYPE myidMPI, nprocsMPI; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ 
C_INT_MPI_TYPE provided_mpi_thread_level; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z, *b, *bs; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE error, status; -+ int error_elpa; -+ -+ elpa_t handle; -+ -+ int value; -+#ifdef WITH_MPI -+#ifndef WITH_OPENMP_TRADITIONAL -+ MPI_Init(&argc, &argv); -+#else -+ MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided_mpi_thread_level); -+ -+ if (provided_mpi_thread_level != MPI_THREAD_MULTIPLE) { -+ fprintf(stderr, "MPI ERROR: MPI_THREAD_MULTIPLE is not provided on this system\n"); -+ MPI_Finalize(); -+ exit(77); -+ } -+#endif -+ -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocsMPI); -+ nprocs = (C_INT_TYPE) nprocsMPI; -+ MPI_Comm_rank(MPI_COMM_WORLD, &myidMPI); -+ myid = (C_INT_TYPE) myidMPI; -+ -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+ PREPARE_MATRIX_RANDOM(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ b = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ bs = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ PREPARE_MATRIX_RANDOM_SPD(na, myid, na_rows, na_cols, sc_desc, b, z, bs, nblk, np_rows, np_cols, my_prow, my_pcol); -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+ handle = elpa_allocate(&error_elpa); -+ //assert_elpa_ok(error_elpa); -+ -+ /* Set parameters */ -+ elpa_set(handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (myid == 0) { -+ printf("Setting the matrix parameters na=%d, nev=%d \n",na,nev); -+ } -+ elpa_set(handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ elpa_set(handle, "blacs_context", (int) my_blacs_ctxt, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(handle)); -+ -+ /* Set tunables */ -+#ifdef TEST_SOLVER_1STAGE -+ 
elpa_set(handle, "solver", ELPA_SOLVER_1STAGE, &error_elpa); -+#else -+ elpa_set(handle, "solver", ELPA_SOLVER_2STAGE, &error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "gpu", TEST_GPU, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#if defined(TEST_SOLVE_2STAGE) && defined(TEST_KERNEL) -+# ifdef TEST_COMPLEX -+ elpa_set(handle, "complex_kernel", TEST_KERNEL, &error_elpa); -+# else -+ elpa_set(handle, "real_kernel", TEST_KERNEL, &error_elpa); -+# endif -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ elpa_get(handle, "solver", &value, &error_elpa); -+ if (myid == 0) { -+ printf("Solver is set to %d \n", value); -+ } -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ elpa_generalized_eigenvectors(handle, a, b, ev, z, 0, &error_elpa); -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ //a = as, so that the problem can be solved again -+ memcpy(a, as, na_rows * na_cols * sizeof(MATRIX_TYPE)); -+ elpa_generalized_eigenvectors(handle, a, b, ev, z, 1, &error_elpa); -+#endif -+#else -+ /* Solve EV problem */ -+ elpa_eigenvectors(handle, a, ev, z, &error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ elpa_deallocate(handle, &error_elpa); -+ elpa_uninit(&error_elpa); -+ -+ /* check the results */ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ status = CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs); -+#else -+ status = CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ } -+ if (status ==0){ -+ printf("All ok!\n"); -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ free(b); -+ free(bs); -+#endif -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} -diff -ruN elpa-2020.11.001/examples/C/test_multiple_objs.c elpa-2020.11.001_ok/examples/C/test_multiple_objs.c ---- elpa-2020.11.001/examples/C/test_multiple_objs.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/test_multiple_objs.c 2021-07-02 10:41:21.945970887 +0200 -@@ -0,0 +1,387 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. 
-+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. -+*/ -+ -+#include "config.h" -+ -+#include <string.h> -+#include <stdio.h> -+#include <stdlib.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+//#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+//#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+//#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# else -+# define MATRIX_TYPE complex float -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# else -+# define MATRIX_TYPE complex double -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+#ifdef HAVE_64BIT_INTEGER_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#include "generated.h" -+void set_basic_parameters(elpa_t *handle, C_INT_TYPE na, C_INT_TYPE nev, C_INT_TYPE na_rows, C_INT_TYPE na_cols, C_INT_TYPE nblk, C_INT_TYPE my_prow, C_INT_TYPE my_pcol){ -+ int error_elpa; -+ elpa_set(*handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(*handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+} -+ -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE status; -+ int error_elpa; -+ int gpu, timings, debug; -+ char str[400]; -+ -+ elpa_t elpa_handle_1, elpa_handle_2, *elpa_handle_ptr; -+ -+ elpa_autotune_t autotune_handle; -+ C_INT_TYPE i, unfinished; -+ -+ C_INT_TYPE value; -+#ifdef WITH_MPI -+ MPI_Init(&argc, &argv); -+ 
MPI_Comm_size(MPI_COMM_WORLD, &nprocs); -+ MPI_Comm_rank(MPI_COMM_WORLD, &myid); -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_real_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_real_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_complex_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_complex_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+ elpa_handle_1 = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ set_basic_parameters(&elpa_handle_1, na, nev, na_rows, na_cols, nblk, my_prow, my_pcol); -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(elpa_handle_1)); -+ -+ elpa_set(elpa_handle_1, "gpu", 0, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(elpa_handle_1, "timings", 1, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(elpa_handle_1, "debug", 1, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_store_settings(elpa_handle_1, "initial_parameters.txt", &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ // barrier after store settings, file created from one MPI rank only, but loaded everywhere -+ MPI_Barrier(MPI_COMM_WORLD); -+#endif -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_handle_2 = elpa_allocate(); -+#else -+ elpa_handle_2 = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ set_basic_parameters(&elpa_handle_2, na, nev, na_rows, na_cols, nblk, my_prow, my_pcol); -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(elpa_handle_2)); -+ -+ elpa_load_settings(elpa_handle_2, "initial_parameters.txt", &error_elpa); -+ -+ elpa_get(elpa_handle_2, "gpu", &gpu, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_get(elpa_handle_2, "timings", &timings, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_get(elpa_handle_2, "debug", &debug, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if ((timings != 1) || (debug != 1) || (gpu != 0)){ -+ printf("Parameters not stored or loaded correctly. Aborting... 
%d, %d, %d\n", timings, debug, gpu); -+ exit(1); -+ } -+ -+ elpa_handle_ptr = &elpa_handle_2; -+ -+ autotune_handle = elpa_autotune_setup(*elpa_handle_ptr, ELPA_AUTOTUNE_FAST, ELPA_AUTOTUNE_DOMAIN_REAL, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ /* mimic 20 scf steps */ -+ -+ for (i=0; i < 20; i++) { -+ -+ unfinished = elpa_autotune_step(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ if (unfinished == 0) { -+ if (myid == 0) { -+ printf("ELPA autotuning finished in the %d th scf step \n",i); -+ } -+ break; -+ } -+ -+ elpa_print_settings(*elpa_handle_ptr, &error_elpa); -+ elpa_autotune_print_state(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ sprintf(str, "saved_parameters_%d.txt", i); -+ elpa_store_settings(*elpa_handle_ptr, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* Solve EV problem */ -+ elpa_eigenvectors(*elpa_handle_ptr, a, ev, z, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* check the results */ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_real_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(double)); -+ -+#else -+ status = check_correctness_evp_numeric_residuals_real_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(float)); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_complex_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex double)); -+#else -+ status = check_correctness_evp_numeric_residuals_complex_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex float)); -+#endif -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ break; -+ } -+ -+ elpa_autotune_print_state(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ sprintf(str, "saved_state_%d.txt", i); -+ elpa_autotune_save_state(*elpa_handle_ptr, autotune_handle, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ //barrier after save state, file created from one MPI rank only, but loaded everywhere -+ MPI_Barrier(MPI_COMM_WORLD); -+#endif -+ -+ elpa_autotune_load_state(*elpa_handle_ptr, autotune_handle, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (unfinished == 1) { -+ if (myid == 0) { -+ printf("ELPA autotuning did not finished during %d scf cycles\n",i); -+ } -+ } -+ -+ } -+ elpa_autotune_set_best(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ if (myid == 0) { -+ printf("The best combination found by the autotuning:\n"); -+ elpa_autotune_print_best(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ } -+ -+ elpa_autotune_deallocate(autotune_handle, &error_elpa); -+ elpa_deallocate(elpa_handle_1, &error_elpa); -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_deallocate(elpa_handle_2); -+#else -+ elpa_deallocate(elpa_handle_2, &error_elpa); -+#endif -+ elpa_uninit(&error_elpa); -+ -+ if (myid == 0) { -+ printf("\n"); -+ printf("2stage ELPA real solver complete\n"); -+ printf("\n"); -+ } -+ -+ if (status ==0){ -+ if (myid ==0) { -+ printf("All ok!\n"); -+ } -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} 
-diff -ruN elpa-2020.11.001/examples/Fortran/assert.h elpa-2020.11.001_ok/examples/Fortran/assert.h ---- elpa-2020.11.001/examples/Fortran/assert.h 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/assert.h 2021-02-02 12:54:41.845532000 +0100 -@@ -0,0 +1,7 @@ -+#define stringify_(x) "x" -+#define stringify(x) stringify_(x) -+#define assert(x) call x_a(x, stringify(x), "F", __LINE__) -+ -+#define assert_elpa_ok(error_code) call x_ao(error_code, stringify(error_code), __FILE__, __LINE__) -+ -+! vim: syntax=fortran -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/complex_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/complex_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/complex_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/complex_2stage_banded.F90 2021-02-02 12:54:41.866126000 +0100 -@@ -0,0 +1,295 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 complex case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The complex ELPA 2 kernel is set as the default kernel. 
-+!> However, this can be overriden by setting -+!> the environment variable "COMPLEX_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+program test_complex2_double_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - COMPLEX version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+#ifdef WITH_MPI -+ !TEST_INT_TYPE, external :: numroc -+#endif -+ complex(kind=ck8), parameter :: CZERO = (0.0_rk8,0.0_rk8), CONE = (1.0_rk8,0.0_rk8) -+ real(kind=rk8), allocatable :: ev(:) -+ -+ complex(kind=ck8), allocatable :: a(:,:), z(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ type(output_t) :: write_to_file -+ integer(kind=c_int) :: error_elpa -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ -+ -+ TEST_INT_TYPE :: numberOfDevices -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+ -+#define COMPLEXCASE -+#define DOUBLE_PRECISION_COMPLEX 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. 
-+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - COMPLEX version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ ! Determine the necessary size of the distributed matrices, -+ ! we use the Scalapack tools routine NUMROC for that. -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0 -+ as(local_row, local_col) = 0 -+ end if -+ end do -+ end do -+ -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. 
ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/double_instance.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/double_instance.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/double_instance.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/double_instance.F90 2021-02-02 12:54:41.866517000 +0100 -@@ -0,0 +1,244 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! 
-+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "../assert.h" -+ -+program test_interface -+ use elpa -+ -+ use precision_for_tests -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ real(kind=C_DOUBLE), allocatable :: a1(:,:), as1(:,:) -+ ! eigenvectors -+ real(kind=C_DOUBLE), allocatable :: z1(:,:) -+ ! eigenvalues -+ real(kind=C_DOUBLE), allocatable :: ev1(:) -+ -+ ! The Matrix -+ complex(kind=C_DOUBLE_COMPLEX), allocatable :: a2(:,:), as2(:,:) -+ ! eigenvectors -+ complex(kind=C_DOUBLE_COMPLEX), allocatable :: z2(:,:) -+ ! eigenvalues -+ real(kind=C_DOUBLE), allocatable :: ev2(:) -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ TEST_INT_TYPE :: solver -+ TEST_INT_TYPE :: qr -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e1, e2 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+ -+ status = 0 -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ -+ np_rows = nprocs/np_cols -+ -+ my_prow = mod(myid, np_cols) -+ my_pcol = myid / np_cols -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a1 (na_rows,na_cols), as1(na_rows,na_cols)) -+ allocate(z1 (na_rows,na_cols)) -+ allocate(ev1(na)) -+ -+ a1(:,:) = 0.0 -+ z1(:,:) = 0.0 -+ ev1(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a1, z1, as1) -+ allocate(a2 (na_rows,na_cols), as2(na_rows,na_cols)) -+ allocate(z2 (na_rows,na_cols)) -+ allocate(ev2(na)) -+ -+ a2(:,:) = 0.0 -+ z2(:,:) = 0.0 -+ ev2(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a2, z2, as2) -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e1 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call 
e1%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ assert(e1%setup() .eq. ELPA_OK) -+ -+ call e1%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("real_kernel", ELPA_2STAGE_REAL_DEFAULT, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ e2 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e2%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e2%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ assert(e2%setup() .eq. ELPA_OK) -+ -+ call e2%set("solver", ELPA_SOLVER_1STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%eigenvectors(a1, ev1, z1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e2%eigenvectors(a2, ev2, z2, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e2, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as1, z1, ev1, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a1) -+ deallocate(as1) -+ deallocate(z1) -+ deallocate(ev1) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as2, z2, ev2, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a2) -+ deallocate(as2) -+ deallocate(z2) -+ deallocate(ev2) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+ -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/real_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/real_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/real_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/real_2stage_banded.F90 2021-02-02 12:54:41.866398000 +0100 -@@ -0,0 +1,294 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! 
http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The real ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "REAL_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+program test_real2_double_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! 
na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+ !TEST_INT_TYPE, external :: numroc -+ -+ real(kind=rk8), allocatable :: a(:,:), z(:,:), as(:,:), ev(:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ integer(kind=c_int) :: error_elpa -+ TEST_INT_TYPE :: numberOfDevices -+ type(output_t) :: write_to_file -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+#define DOUBLE_PRECISION_REAL 1 -+ -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+#define REALCASE -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - REAL version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! 
set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g(local_row, nblk, my_prow, np_rows) -+ do local_col = 1, na_cols -+ global_col = index_l2g(local_col, nblk, my_pcol, np_cols) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0.0 -+ as(local_row, local_col) = 0.0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/single_complex_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/single_complex_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/single_complex_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/single_complex_2stage_banded.F90 2021-02-02 12:54:41.866260000 +0100 -@@ -0,0 +1,295 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! 
http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 complex case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The complex ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "COMPLEX_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+program test_complex2_single_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - COMPLEX version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! 
nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+#ifdef WITH_MPI -+ !TEST_INT_TYPE, external :: numroc -+#endif -+ complex(kind=ck4), parameter :: CZERO = (0.0_rk4,0.0_rk4), CONE = (1.0_rk4,0.0_rk4) -+ real(kind=rk4), allocatable :: ev(:) -+ -+ complex(kind=ck4), allocatable :: a(:,:), z(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ type(output_t) :: write_to_file -+ integer(kind=ik) :: error_elpa -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ -+ -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+ -+#define COMPLEXCASE -+#define DOUBLE_PRECISION_COMPLEX 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - COMPLEX version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ ! Determine the necessary size of the distributed matrices, -+ ! we use the Scalapack tools routine NUMROC for that. -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ !------------------------------------------------------------------------------- -+ ! 
Allocate matrices and set up a test matrix for the eigenvalue problem -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0 -+ as(local_row, local_col) = 0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/single_real_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/single_real_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/single_real_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/single_real_2stage_banded.F90 2021-02-02 12:54:41.866634000 +0100 -@@ -0,0 +1,287 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. 
Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The real ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "REAL_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+program test_real2_single_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! 
-+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ use tests_scalapack_interfaces -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ real(kind=rk4), allocatable :: a(:,:), z(:,:), as(:,:), ev(:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ integer(kind=c_int) :: error_elpa -+ type(output_t) :: write_to_file -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+#define DOUBLE_PRECISION_REAL 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ -+ STATUS = 0 -+ -+#define REALCASE -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - REAL version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' 
-+ end if -+ -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0.0 -+ as(local_row, local_col) = 0.0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa_print_headers.F90 elpa-2020.11.001_ok/examples/Fortran/elpa_print_headers.F90 ---- elpa-2020.11.001/examples/Fortran/elpa_print_headers.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa_print_headers.F90 2021-02-02 12:54:41.858363000 +0100 -@@ -0,0 +1,273 @@ -+#if 0 -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! 
Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+! ELPA1 -- Faster replacements for ScaLAPACK symmetric eigenvalue routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+#endif -+ -+#ifdef WITH_OPENMP_TRADITIONAL -+ if (myid .eq. 0) then -+ print *,"Threaded version of test program" -+ print *,"Using ",omp_get_max_threads()," threads" -+ print *," " -+ endif -+#endif -+ -+#ifndef WITH_MPI -+ if (myid .eq. 0) then -+ print *,"This version of ELPA does not support MPI parallelisation" -+ print *,"For MPI support re-build ELPA with appropiate flags" -+ print *," " -+ endif -+#endif -+ -+#ifdef ELPA1 -+ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION_REAL -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued double-precision version of ELPA1 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued single-precision version of ELPA1 is used" -+ print *," " -+ endif -+#endif -+ -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued double-precision version of ELPA1 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued single-precision version of ELPA1 is used" -+ print *," " -+ endif -+#endif -+ -+#endif /* DATATYPE */ -+ -+#else /* ELPA1 */ -+ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION_REAL -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued double-precision version of ELPA2 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued single-precision version of ELPA2 is used" -+ print *," " -+ endif -+#endif -+ -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued double-precision version of ELPA2 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 
0) then -+ print *," " -+ print *,"Complex valued single-precision version of ELPA2 is used" -+ print *," " -+ endif -+#endif -+ -+#endif /* DATATYPE */ -+ -+#endif /* ELPA1 */ -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+#ifdef HAVE_REDIRECT -+ if (check_redirect_environment_variable()) then -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Redirection of mpi processes is used" -+ print *," " -+ if (create_directories() .ne. 1) then -+ write(error_unit,*) "Unable to create directory for stdout and stderr!" -+ stop 1 -+ endif -+ endif -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call redirect_stdout(myid) -+ endif -+#endif -+ -+#ifndef ELPA1 -+ -+ if (myid .eq. 0) then -+ print *," " -+ print *,"This ELPA2 is build with" -+#ifdef WITH_GPU_KERNEL -+ print *,"GPU support" -+#endif -+ print *," " -+#ifdef REALCASE -+ -+#ifdef HAVE_AVX2 -+ -+#ifdef WITH_REAL_AVX_BLOCK2_KERNEL -+ print *,"AVX2 optimized kernel (2 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK4_KERNEL -+ print *,"AVX2 optimized kernel (4 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK6_KERNEL -+ print *,"AVX2 optimized kernel (6 blocking) for real matrices" -+#endif -+ -+#else /* no HAVE_AVX2 */ -+ -+#ifdef HAVE_AVX -+ -+#ifdef WITH_REAL_AVX_BLOCK2_KERNEL -+ print *,"AVX optimized kernel (2 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK4_KERNEL -+ print *,"AVX optimized kernel (4 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK6_KERNEL -+ print *,"AVX optimized kernel (6 blocking) for real matrices" -+#endif -+ -+#endif -+ -+#endif /* HAVE_AVX2 */ -+ -+ -+#ifdef WITH_REAL_GENERIC_KERNEL -+ print *,"GENERIC kernel for real matrices" -+#endif -+#ifdef WITH_REAL_GENERIC_SIMPLE_KERNEL -+ print *,"GENERIC SIMPLE kernel for real matrices" -+#endif -+#ifdef WITH_REAL_SSE_ASSEMBLY_KERNEL -+ print *,"SSE ASSEMBLER kernel for real matrices" -+#endif -+#ifdef WITH_REAL_BGP_KERNEL -+ print *,"BGP kernel for real matrices" -+#endif -+#ifdef WITH_REAL_BGQ_KERNEL -+ print *,"BGQ kernel for real matrices" -+#endif -+ -+#endif /* DATATYPE == REAL */ -+ -+#ifdef COMPLEXCASE -+ -+#ifdef HAVE_AVX2 -+ -+#ifdef WITH_COMPLEX_AVX_BLOCK2_KERNEL -+ print *,"AVX2 optimized kernel (2 blocking) for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_AVX_BLOCK1_KERNEL -+ print *,"AVX2 optimized kernel (1 blocking) for complex matrices" -+#endif -+ -+#else /* no HAVE_AVX2 */ -+ -+#ifdef HAVE_AVX -+ -+#ifdef WITH_COMPLEX_AVX_BLOCK2_KERNEL -+ print *,"AVX optimized kernel (2 blocking) for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_AVX_BLOCK1_KERNEL -+ print *,"AVX optimized kernel (1 blocking) for complex matrices" -+#endif -+ -+#endif -+ -+#endif /* HAVE_AVX2 */ -+ -+ -+#ifdef WITH_COMPLEX_GENERIC_KERNEL -+ print *,"GENERIC kernel for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_GENERIC_SIMPLE_KERNEL -+ print *,"GENERIC SIMPLE kernel for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_SSE_ASSEMBLY_KERNEL -+ print *,"SSE ASSEMBLER kernel for complex matrices" -+#endif -+ -+#endif /* DATATYPE == COMPLEX */ -+ -+ endif -+#endif /* ELPA1 */ -+ -+ if (write_to_file%eigenvectors) then -+ if (myid .eq. 0) print *,"Writing Eigenvectors to files" -+ endif -+ -+ if (write_to_file%eigenvalues) then -+ if (myid .eq. 
0) print *,"Writing Eigenvalues to files" -+ endif -+ -+ -diff -ruN elpa-2020.11.001/examples/Fortran/Makefile_examples_hybrid elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_hybrid ---- elpa-2020.11.001/examples/Fortran/Makefile_examples_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_hybrid 2021-07-02 10:49:17.191984000 +0200 -@@ -0,0 +1,38 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_MODULES) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+# GCC -+# F90 = mpif90 -O3 -fopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_MODULES) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+LIBS = -L$(ELPA_LIB) -lelpatest_openmp -lelpa_openmp $(SCALAPACK_LIB) $(MKL) -+# CC = mpicc -qopenmp -O3 -+# GCC -+# CC = mpicc -fopenmp -O3 -+ -+all: test_real_1stage_omp test_real_2stage_all_kernels_omp test_autotune_omp test_multiple_objs_omp test_split_comm_omp test_skewsymmetric_omp -+ -+test_real_1stage_omp: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP)/elpa -o test_real_1stage_omp.F90 test.F90 -+ $(F90) -o $@ test_real_1stage_omp.F90 $(LIBS) -+ -+test_real_2stage_all_kernels_omp: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP)/elpa -o test_real_2stage_all_kernels_omp.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels_omp.F90 $(LIBS) -+ -+test_autotune_omp: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_autotune.F90 $(LIBS) -+ -+test_multiple_objs_omp: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm_omp: test_split_comm.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_split_comm.F90 $(LIBS) -+ -+test_skewsymmetric_omp: test_skewsymmetric.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_skewsymmetric.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Fortran/Makefile_examples_pure elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure ---- elpa-2020.11.001/examples/Fortran/Makefile_examples_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure 2021-07-02 10:53:24.225432000 +0200 -@@ -0,0 +1,34 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC 
-+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+# CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs test_split_comm test_skewsymmetric -+ -+test_real_1stage: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_1stage.F90 test.F90 -+ $(F90) -o $@ test_real_1stage.F90 $(LIBS) -+ -+test_real_2stage_all_kernels: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_2stage_all_kernels.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels.F90 $(LIBS) -+ -+test_autotune: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.F90 $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm: test_split_comm.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_split_comm.F90 $(LIBS) -+ -+test_skewsymmetric: test_skewsymmetric.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_skewsymmetric.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Fortran/Makefile_examples_pure_cuda elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure_cuda ---- elpa-2020.11.001/examples/Fortran/Makefile_examples_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure_cuda 2021-07-02 10:53:52.066370000 +0200 -@@ -0,0 +1,34 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcudart -+# CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs test_split_comm test_skewsymmetric -+ -+test_real_1stage: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_1stage.F90 test.F90 -+ $(F90) -o $@ test_real_1stage.F90 $(LIBS) -+ -+test_real_2stage_all_kernels: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_2stage_all_kernels.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels.F90 $(LIBS) -+ -+test_autotune: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.F90 
$(LIBS) -+ -+test_multiple_objs: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm: test_split_comm.F90 -+ $(F90) -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_split_comm.F90 $(LIBS) -+ -+test_skewsymmetric: test_skewsymmetric.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_skewsymmetric.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Fortran/test_autotune.F90 elpa-2020.11.001_ok/examples/Fortran/test_autotune.F90 ---- elpa-2020.11.001/examples/Fortran/test_autotune.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_autotune.F90 2021-02-02 12:54:41.866864000 +0100 -@@ -0,0 +1,312 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! 
Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+ class(elpa_autotune_t), pointer :: tune_state -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ as(:,:) = a(:,:) -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (layout .eq. 
'C') then -+ call e%set("matrix_order",COLUMN_MAJOR_ORDER,error_elpa) -+ else -+ call e%set("matrix_order",ROW_MAJOR_ORDER,error_elpa) -+ endif -+ -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ call e%set("timings",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("debug",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("gpu", 0, error_elpa) -+ assert_elpa_ok(error_elpa) -+ !call e%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e%setup()) -+ -+ if (myid == 0) print *, "" -+ -+ tune_state => e%autotune_setup(ELPA_AUTOTUNE_FAST, AUTOTUNE_DOMAIN, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ iter=0 -+ do while (e%autotune_step(tune_state, error_elpa)) -+ assert_elpa_ok(error_elpa) -+ iter=iter+1 -+ write(iter_string,'(I5.5)') iter -+ !call e%print_settings() -+ !call e%store_settings("saved_parameters_"//trim(iter_string)//".txt") -+ call e%timer_start("eigenvectors: iteration "//trim(iter_string)) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("eigenvectors: iteration "//trim(iter_string)) -+ -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e%print_times("eigenvectors: iteration "//trim(iter_string)) -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ a(:,:) = as(:,:) -+ !call e%autotune_print_state(tune_state) -+ !call e%autotune_save_state(tune_state, "saved_state_"//trim(iter_string)//".txt") -+ end do -+ -+ ! set and print the autotuned-settings -+ call e%autotune_set_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "The best combination found by the autotuning:" -+ flush(output_unit) -+ call e%autotune_print_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ endif -+ ! de-allocate autotune object -+ call elpa_autotune_deallocate(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "Running once more time with the best found setting..." -+ endif -+ call e%timer_start("eigenvectors: best setting") -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("eigenvectors: best setting") -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e%print_times("eigenvectors: best setting") -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ -+ call elpa_deallocate(e,error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test.F90 elpa-2020.11.001_ok/examples/Fortran/test.F90 ---- elpa-2020.11.001/examples/Fortran/test.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test.F90 2021-02-02 12:54:41.862023000 +0100 -@@ -0,0 +1,926 @@ -+! This file is part of ELPA. -+! -+! 
The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! 
Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE) ^ defined(TEST_SCALAPACK_ALL) ^ defined(TEST_SCALAPACK_PART)) -+error: define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE or TEST_SCALAPACK_ALL or TEST_SCALAPACK_PART -+#endif -+ -+#ifdef TEST_SOLVER_1STAGE -+#ifdef TEST_ALL_KERNELS -+error: TEST_ALL_KERNELS cannot be defined for TEST_SOLVER_1STAGE -+#endif -+#ifdef TEST_KERNEL -+error: TEST_KERNEL cannot be defined for TEST_SOLVER_1STAGE -+#endif -+#endif -+ -+#ifdef TEST_SOLVER_2STAGE -+#if !(defined(TEST_KERNEL) ^ defined(TEST_ALL_KERNELS)) -+error: define either TEST_ALL_KERNELS or a valid TEST_KERNEL -+#endif -+#endif -+ -+#ifdef TEST_GENERALIZED_DECOMP_EIGENPROBLEM -+#define TEST_GENERALIZED_EIGENPROBLEM -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+#ifdef TEST_REAL -+#define KERNEL_KEY "real_kernel" -+#endif -+#ifdef TEST_COMPLEX -+#define KERNEL_KEY "complex_kernel" -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+#ifdef WITH_SCALAPACK_TESTS -+ use test_scalapack -+#endif -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+#ifdef WITH_OPENMP -+ use omp_lib -+#endif -+ use precision_for_tests -+ -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_MPI_TYPE :: myidMPI, nprocsMPI -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ MATRIX_TYPE, allocatable :: b(:,:), c(:,:) -+#endif -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ MATRIX_TYPE, allocatable :: b(:,:), bs(:,:) -+#endif -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ logical :: check_all_evals, skip_check_correctness -+ -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ EV_TYPE, allocatable :: d(:), sd(:), ds(:), sds(:) -+ EV_TYPE :: diagonalELement, subdiagonalElement -+#endif -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+#ifdef TEST_ALL_KERNELS -+ TEST_INT_TYPE :: i -+#endif -+#ifdef TEST_ALL_LAYOUTS -+ character(len=1), parameter :: layouts(2) = [ 'C', 'R' ] -+ TEST_INT_TYPE :: i_layout -+#endif -+ integer(kind=c_int):: kernel -+ character(len=1) :: layout -+ logical :: do_test_numeric_residual, do_test_numeric_residual_generalized, & -+ do_test_analytic_eigenvalues, & -+ do_test_analytic_eigenvalues_eigenvectors, & -+ do_test_frank_eigenvalues, & -+ do_test_toeplitz_eigenvalues, do_test_cholesky, & -+ do_test_hermitian_multiply -+ logical :: ignoreError -+#ifdef WITH_OPENMP -+ TEST_INT_TYPE :: max_threads, threads_caller -+#endif -+ -+#ifdef SPLIT_COMM_MYSELF -+ TEST_INT_MPI_TYPE :: mpi_comm_rows, mpi_comm_cols, mpi_string_length, mpierr2 -+ character(len=MPI_MAX_ERROR_STRING) :: mpierr_string -+#endif -+ -+ ignoreError = .false. -+ -+ call read_input_parameters_traditional(na, nev, nblk, write_to_file, skip_check_correctness) -+ call setup_mpi(myid, nprocs) -+ -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ check_all_evals = .true. -+ -+ -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .false. -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ -+ do_test_cholesky = .false. -+#if defined(TEST_CHOLESKY) -+ do_test_cholesky = .true. -+#endif -+ do_test_hermitian_multiply = .false. -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ do_test_hermitian_multiply = .true. -+#endif -+ -+ status = 0 -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Program ' // TEST_CASE -+ print *, "" -+ endif -+ -+#ifdef TEST_ALL_LAYOUTS -+ do i_layout = 1, size(layouts) ! layouts -+ layout = layouts(i_layout) -+ do np_cols = 1, nprocs ! factors -+ if (mod(nprocs,np_cols) /= 0 ) then -+ cycle -+ endif -+#else -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+#endif -+ -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+#if TEST_QR_DECOMPOSITION == 1 -+ -+#if TEST_GPU == 1 -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+#endif /* TEST_GPU */ -+ if (nblk .lt. 64) then -+ if (myid .eq. 0) then -+ print *,"At the moment QR decomposition need blocksize of at least 64" -+ endif -+ if ((na .lt. 64) .and. (myid .eq. 
0)) then -+ print *,"This is why the matrix size must also be at least 64 or only 1 MPI task can be used" -+ endif -+ -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+ endif -+#endif /* TEST_QR_DECOMPOSITION */ -+ -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, & -+ np_cols, layout, my_blacs_ctxt, my_prow, & -+ my_pcol) -+ -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) && defined(TEST_ALL_LAYOUTS) -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+#endif -+ -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+ allocate(b (na_rows,na_cols)) -+ allocate(c (na_rows,na_cols)) -+#endif -+ -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ allocate(b (na_rows,na_cols)) -+ allocate(bs (na_rows,na_cols)) -+#endif -+ -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ allocate(d (na), ds(na)) -+ allocate(sd (na), sds(na)) -+#endif -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+#if defined(TEST_MATRIX_RANDOM) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) && !defined(TEST_EIGENVALUES) -+ ! the random matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! RANDOM + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! RANDOM + TEST_CHOLESKY: wee need SPD matrix -+ ! RANDOM + TEST_EIGENVALUES: no correctness check known -+ -+ ! We also have to take care of special case in TEST_EIGENVECTORS -+#if !defined(TEST_EIGENVECTORS) -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+#else /* TEST_EIGENVECTORS */ -+ if (nev .ge. 1) then -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_numeric_residual = .true. -+#endif -+ else -+ if (myid .eq. 0) then -+ print *,"At the moment with the random matrix you need nev >=1" -+ endif -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+ endif -+#endif /* TEST_EIGENVECTORS */ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* (TEST_MATRIX_RANDOM) */ -+ -+#if defined(TEST_MATRIX_RANDOM) && defined(TEST_CHOLESKY) -+ call prepare_matrix_random_spd(na, myid, sc_desc, a, z, as, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* TEST_MATRIX_RANDOM and TEST_CHOLESKY */ -+ -+#if defined(TEST_MATRIX_RANDOM) && defined(TEST_GENERALIZED_EIGENPROBLEM) -+ ! call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ call prepare_matrix_random_spd(na, myid, sc_desc, b, z, bs, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .true. 
-+#endif /* TEST_MATRIX_RANDOM and TEST_GENERALIZED_EIGENPROBLEM */ -+ -+#if defined(TEST_MATRIX_RANDOM) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_EIGENVALUES)) -+#error "Random matrix is not allowed in this configuration" -+#endif -+ -+#if defined(TEST_MATRIX_ANALYTIC) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) -+ ! the analytic matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! ANALYTIC + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! ANALTIC + TEST_CHOLESKY: no correctness check yet implemented -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ as(:,:) = a -+ -+ do_test_numeric_residual = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_analytic_eigenvalues = .true. -+#endif -+#if defined(TEST_EIGENVECTORS) -+ if (nev .ge. 1) then -+ do_test_analytic_eigenvalues_eigenvectors = .true. -+ do_test_analytic_eigenvalues = .false. -+ else -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ endif -+#endif -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* TEST_MATRIX_ANALYTIC */ -+#if defined(TEST_MATRIX_ANALYTIC) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_CHOLESKY)) -+#error "Analytic matrix is not allowd in this configuration" -+#endif -+ -+#if defined(TEST_MATRIX_TOEPLITZ) -+ ! The Toeplitz matrix works in each test -+#ifdef TEST_SINGLE -+ diagonalElement = 0.45_c_float -+ subdiagonalElement = 0.78_c_float -+#else -+ diagonalElement = 0.45_c_double -+ subdiagonalElement = 0.78_c_double -+#endif -+ -+! actually we test cholesky for diagonal matrix only -+#if defined(TEST_CHOLESKY) -+#ifdef TEST_SINGLE -+ diagonalElement = (2.546_c_float, 0.0_c_float) -+ subdiagonalElement = (0.0_c_float, 0.0_c_float) -+#else -+ diagonalElement = (2.546_c_double, 0.0_c_double) -+ subdiagonalElement = (0.0_c_double, 0.0_c_double) -+#endif -+#endif /* TEST_CHOLESKY */ -+ -+ call prepare_matrix_toeplitz(na, diagonalElement, subdiagonalElement, & -+ d, sd, ds, sds, a, as, nblk, np_rows, & -+ np_cols, my_prow, my_pcol) -+ -+ -+ do_test_numeric_residual = .false. -+#if defined(TEST_EIGENVECTORS) -+ if (nev .ge. 1) then -+ do_test_numeric_residual = .true. -+ else -+ do_test_numeric_residual = .false. -+ endif -+#endif -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+#if defined(TEST_CHOLESKY) -+ do_test_toeplitz_eigenvalues = .false. -+#else -+ do_test_toeplitz_eigenvalues = .true. -+#endif -+ -+#endif /* TEST_MATRIX_TOEPLITZ */ -+ -+ -+#if defined(TEST_MATRIX_FRANK) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) -+ ! the random matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! FRANK + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! FRANK + TEST_CHOLESKY: no correctness check yet implemented -+ -+ ! We also have to take care of special case in TEST_EIGENVECTORS -+#if !defined(TEST_EIGENVECTORS) -+ call prepare_matrix_frank(na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. 
-+ -+#else /* TEST_EIGENVECTORS */ -+ -+ if (nev .ge. 1) then -+ call prepare_matrix_frank(na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ else -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ -+ endif -+ -+#endif /* TEST_EIGENVECTORS */ -+#endif /* (TEST_MATRIX_FRANK) */ -+#if defined(TEST_MATRIX_FRANK) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_CHOLESKY)) -+#error "FRANK matrix is not allowed in this configuration" -+#endif -+ -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+#ifdef TEST_REAL -+ -+#ifdef TEST_DOUBLE -+ b(:,:) = 2.0_c_double * a(:,:) -+ c(:,:) = 0.0_c_double -+#else -+ b(:,:) = 2.0_c_float * a(:,:) -+ c(:,:) = 0.0_c_float -+#endif -+ -+#endif /* TEST_REAL */ -+ -+#ifdef TEST_COMPLEX -+ -+#ifdef TEST_DOUBLE -+ b(:,:) = 2.0_c_double * a(:,:) -+ c(:,:) = (0.0_c_double, 0.0_c_double) -+#else -+ b(:,:) = 2.0_c_float * a(:,:) -+ c(:,:) = (0.0_c_float, 0.0_c_float) -+#endif -+ -+#endif /* TEST_COMPLEX */ -+ -+#endif /* TEST_HERMITIAN_MULTIPLY */ -+ -+! if the test is used for (repeated) performacne tests, one might want to skip the checking -+! of the results, which might be time-consuming and not necessary. -+ if(skip_check_correctness) then -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .false. -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_cholesky = .false. -+ endif -+ -+ -+#ifdef WITH_OPENMP -+ threads_caller = omp_get_max_threads() -+ if (myid == 0) then -+ print *,"The calling program uses ",threads_caller," threads" -+ endif -+#endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (layout .eq. 'C') then -+ call e%set("matrix_order",COLUMN_MAJOR_ORDER,error_elpa) -+ else -+ call e%set("matrix_order",ROW_MAJOR_ORDER,error_elpa) -+ endif -+ -+#ifdef WITH_MPI -+#ifdef SPLIT_COMM_MYSELF -+ call mpi_comm_split(MPI_COMM_WORLD, int(my_pcol,kind=MPI_KIND), int(my_prow,kind=MPI_KIND), & -+ mpi_comm_rows, mpierr) -+ if (mpierr .ne. MPI_SUCCESS) then -+ call MPI_ERROR_STRING(mpierr, mpierr_string, mpi_string_length, mpierr2) -+ write(error_unit,*) "MPI ERROR occured during mpi_comm_split for row communicator: ", trim(mpierr_string) -+ stop 1 -+ endif -+ -+ call mpi_comm_split(MPI_COMM_WORLD, int(my_prow,kind=MPI_KIND), int(my_pcol,kind=MPI_KIND), & -+ mpi_comm_cols, mpierr) -+ if (mpierr .ne. 
MPI_SUCCESS) then -+ call MPI_ERROR_STRING(mpierr,mpierr_string, mpi_string_length, mpierr2) -+ write(error_unit,*) "MPI ERROR occured during mpi_comm_split for col communicator: ", trim(mpierr_string) -+ stop 1 -+ endif -+ -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("mpi_comm_rows", int(mpi_comm_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("mpi_comm_cols", int(mpi_comm_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#else -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ call e%set("blacs_context", int(my_blacs_ctxt,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ call e%set("timings", 1_ik, error_elpa) -+ assert_elpa_ok(e%setup()) -+ -+#ifdef TEST_SOLVER_1STAGE -+ call e%set("solver", ELPA_SOLVER_1STAGE, error_elpa) -+#else -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+#endif -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("gpu", TEST_GPU, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%set("qr", 1_ik, error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+#ifdef WITH_OPENMP -+ max_threads=omp_get_max_threads() -+ call e%set("omp_threads", int(max_threads,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ if (myid == 0) print *, "" -+ -+#ifdef TEST_ALL_KERNELS -+ do i = 0, elpa_option_cardinality(KERNEL_KEY) ! kernels -+ if (TEST_GPU .eq. 0) then -+ kernel = elpa_option_enumerate(KERNEL_KEY, int(i,kind=c_int)) -+ if (kernel .eq. ELPA_2STAGE_REAL_GPU) continue -+ if (kernel .eq. ELPA_2STAGE_COMPLEX_GPU) continue -+ endif -+#endif -+#ifdef TEST_KERNEL -+ kernel = TEST_KERNEL -+#endif -+ -+#ifdef TEST_SOLVER_2STAGE -+#if TEST_GPU == 1 -+#if defined TEST_REAL -+ kernel = ELPA_2STAGE_REAL_GPU -+#endif -+#if defined TEST_COMPLEX -+ kernel = ELPA_2STAGE_COMPLEX_GPU -+#endif -+#endif -+ call e%set(KERNEL_KEY, kernel, error_elpa) -+#ifdef TEST_KERNEL -+ assert_elpa_ok(error_elpa) -+#else -+ if (error_elpa /= ELPA_OK) then -+ cycle -+ endif -+ ! actually used kernel might be different if forced via environment variables -+ call e%get(KERNEL_KEY, kernel, error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ if (myid == 0) then -+ print *, elpa_int_value_to_string(KERNEL_KEY, kernel) // " kernel" -+ endif -+#endif -+ -+ -+! print all parameters -+ call e%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef TEST_ALL_KERNELS -+ call e%timer_start(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#endif -+ -+ ! The actual solve step -+#if defined(TEST_EIGENVECTORS) -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%timer_start("e%eigenvectors_qr()") -+#else -+ call e%timer_start("e%eigenvectors()") -+#endif -+#ifdef TEST_SCALAPACK_ALL -+ call solve_scalapack_all(na, a, sc_desc, ev, z) -+#elif TEST_SCALAPACK_PART -+ call solve_scalapack_part(na, a, sc_desc, nev, ev, z) -+ check_all_evals = .false. ! 
scalapack does not compute all eigenvectors -+#else -+ call e%eigenvectors(a, ev, z, error_elpa) -+#endif -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%timer_stop("e%eigenvectors_qr()") -+#else -+ call e%timer_stop("e%eigenvectors()") -+#endif -+#endif /* TEST_EIGENVECTORS */ -+ -+#ifdef TEST_EIGENVALUES -+ call e%timer_start("e%eigenvalues()") -+ call e%eigenvalues(a, ev, error_elpa) -+ call e%timer_stop("e%eigenvalues()") -+#endif -+ -+#if defined(TEST_SOLVE_TRIDIAGONAL) -+ call e%timer_start("e%solve_tridiagonal()") -+ call e%solve_tridiagonal(d, sd, z, error_elpa) -+ call e%timer_stop("e%solve_tridiagonal()") -+ ev(:) = d(:) -+#endif -+ -+#if defined(TEST_CHOLESKY) -+ call e%timer_start("e%cholesky()") -+ call e%cholesky(a, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("e%cholesky()") -+#endif -+ -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ call e%timer_start("e%hermitian_multiply()") -+ call e%hermitian_multiply('F','F', int(na,kind=c_int), a, b, int(na_rows,kind=c_int), & -+ int(na_cols,kind=c_int), c, int(na_rows,kind=c_int), & -+ int(na_cols,kind=c_int), error_elpa) -+ call e%timer_stop("e%hermitian_multiply()") -+#endif -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ call e%timer_start("e%generalized_eigenvectors()") -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ call e%timer_start("is_already_decomposed=.false.") -+#endif -+ call e%generalized_eigenvectors(a, b, ev, z, .false., error_elpa) -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ call e%timer_stop("is_already_decomposed=.false.") -+ a = as -+ call e%timer_start("is_already_decomposed=.true.") -+ call e%generalized_eigenvectors(a, b, ev, z, .true., error_elpa) -+ call e%timer_stop("is_already_decomposed=.true.") -+#endif -+ call e%timer_stop("e%generalized_eigenvectors()") -+#endif -+ -+ assert_elpa_ok(error_elpa) -+ -+#ifdef TEST_ALL_KERNELS -+ call e%timer_stop(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#endif -+ -+ if (myid .eq. 0) then -+#ifdef TEST_ALL_KERNELS -+ call e%print_times(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#else /* TEST_ALL_KERNELS */ -+ -+#if defined(TEST_EIGENVECTORS) -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%print_times("e%eigenvectors_qr()") -+#else -+ call e%print_times("e%eigenvectors()") -+#endif -+#endif -+#ifdef TEST_EIGENVALUES -+ call e%print_times("e%eigenvalues()") -+#endif -+#ifdef TEST_SOLVE_TRIDIAGONAL -+ call e%print_times("e%solve_tridiagonal()") -+#endif -+#ifdef TEST_CHOLESKY -+ call e%print_times("e%cholesky()") -+#endif -+#ifdef TEST_HERMITIAN_MULTIPLY -+ call e%print_times("e%hermitian_multiply()") -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ call e%print_times("e%generalized_eigenvectors()") -+#endif -+#endif /* TEST_ALL_KERNELS */ -+ endif -+ -+ if (do_test_analytic_eigenvalues) then -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, & -+ my_prow, my_pcol, check_all_evals, .false.) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_analytic_eigenvalues_eigenvectors) then -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, & -+ my_prow, my_pcol, check_all_evals, .true.) 
-+ call check_status(status, myid) -+ endif -+ -+ if(do_test_numeric_residual) then -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, & -+ np_rows,np_cols, my_prow, my_pcol) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_frank_eigenvalues) then -+ status = check_correctness_eigenvalues_frank(na, ev, z, myid) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_toeplitz_eigenvalues) then -+#if defined(TEST_EIGENVALUES) || defined(TEST_SOLVE_TRIDIAGONAL) -+ status = check_correctness_eigenvalues_toeplitz(na, diagonalElement, & -+ subdiagonalElement, ev, z, myid) -+ call check_status(status, myid) -+#endif -+ endif -+ -+ if (do_test_cholesky) then -+ status = check_correctness_cholesky(na, a, as, na_rows, sc_desc, myid ) -+ call check_status(status, myid) -+ endif -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+ if (do_test_hermitian_multiply) then -+ status = check_correctness_hermitian_multiply(na, a, b, c, na_rows, sc_desc, myid ) -+ call check_status(status, myid) -+ endif -+#endif -+ -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ if(do_test_numeric_residual_generalized) then -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, & -+ np_cols, my_prow, & -+ my_pcol, bs) -+ call check_status(status, myid) -+ endif -+#endif -+ -+ -+#ifdef WITH_OPENMP -+ if (threads_caller .ne. omp_get_max_threads()) then -+ if (myid .eq. 0) then -+ print *, " ERROR! the number of OpenMP threads has not been restored correctly" -+ endif -+ status = 1 -+ endif -+#endif -+ if (myid == 0) then -+ print *, "" -+ endif -+ -+#ifdef TEST_ALL_KERNELS -+ a(:,:) = as(:,:) -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ d = ds -+ sd = sds -+#endif -+ end do ! kernels -+#endif -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+#ifdef TEST_HERMITIAN_MULTIPLY -+ deallocate(b) -+ deallocate(c) -+#endif -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ deallocate(d, ds) -+ deallocate(sd, sds) -+#endif -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ deallocate(b, bs) -+#endif -+ -+#ifdef TEST_ALL_LAYOUTS -+ end do ! factors -+ end do ! layouts -+#endif -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ -+ contains -+ -+ subroutine check_status(status, myid) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: status, myid -+ TEST_INT_MPI_TYPE :: mpierr -+ if (status /= 0) then -+ if (myid == 0) print *, "Result incorrect!" -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test_multiple_objs.F90 elpa-2020.11.001_ok/examples/Fortran/test_multiple_objs.F90 ---- elpa-2020.11.001/examples/Fortran/test_multiple_objs.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_multiple_objs.F90 2021-02-02 12:54:41.866751000 +0100 -@@ -0,0 +1,379 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! 
- Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+ -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! 
number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_TYPE :: ierr -+ TEST_INT_MPI_TYPE :: mpierr -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e1, e2, e_ptr -+ class(elpa_autotune_t), pointer :: tune_state -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ TEST_INT_TYPE :: timings, debug, gpu -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ as(:,:) = a(:,:) -+ -+ e1 => elpa_allocate(error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+ call set_basic_params(e1, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e1%set("timings",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("debug",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("gpu", 0, error_elpa) -+ assert_elpa_ok(error_elpa) -+ !call e1%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e1%setup()) -+ -+ call e1%store_settings("initial_parameters.txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ ! barrier after store settings, file created from one MPI rank only, but loaded everywhere -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ -+ ! try to load parameters into another object -+ e2 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call set_basic_params(e2, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ call e2%load_settings("initial_parameters.txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert_elpa_ok(e2%setup()) -+ -+ ! 
test whether the user setting of e1 are correctly loade to e2 -+ call e2%get("timings", int(timings,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%get("debug", int(debug,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%get("gpu", int(gpu,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if ((timings .ne. 1) .or. (debug .ne. 1) .or. (gpu .ne. 0)) then -+ print *, "Parameters not stored or loaded correctly. Aborting...", timings, debug, gpu -+ stop 1 -+ endif -+ -+ if(myid == 0) print *, "parameters of e1" -+ call e1%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if(myid == 0) print *, "" -+ if(myid == 0) print *, "parameters of e2" -+ call e2%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ e_ptr => e2 -+ -+ -+ tune_state => e_ptr%autotune_setup(ELPA_AUTOTUNE_FAST, AUTOTUNE_DOMAIN, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ iter=0 -+ do while (e_ptr%autotune_step(tune_state, error_elpa)) -+ assert_elpa_ok(error_elpa) -+ -+ iter=iter+1 -+ write(iter_string,'(I5.5)') iter -+ call e_ptr%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%store_settings("saved_parameters_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%timer_start("eigenvectors: iteration "//trim(iter_string)) -+ call e_ptr%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e_ptr%timer_stop("eigenvectors: iteration "//trim(iter_string)) -+ -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e_ptr%print_times("eigenvectors: iteration "//trim(iter_string)) -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ a(:,:) = as(:,:) -+ call e_ptr%autotune_print_state(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%autotune_save_state(tune_state, "saved_state_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ ! barrier after save state, file created from one MPI rank only, but loaded everywhere -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call e_ptr%autotune_load_state(tune_state, "saved_state_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ end do -+ -+ ! set and print the autotuned-settings -+ call e_ptr%autotune_set_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "The best combination found by the autotuning:" -+ flush(output_unit) -+ call e_ptr%autotune_print_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ endif -+ ! de-allocate autotune object -+ call elpa_autotune_deallocate(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "Running once more time with the best found setting..." -+ endif -+ call e_ptr%timer_start("eigenvectors: best setting") -+ call e_ptr%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%timer_stop("eigenvectors: best setting") -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e_ptr%print_times("eigenvectors: best setting") -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) 
-+ -+ call elpa_deallocate(e_ptr, error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol -+ -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call elpa%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test_skewsymmetric.F90 elpa-2020.11.001_ok/examples/Fortran/test_skewsymmetric.F90 ---- elpa-2020.11.001/examples/Fortran/test_skewsymmetric.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_skewsymmetric.F90 2021-02-02 12:54:41.850860000 +0100 -@@ -0,0 +1,400 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! 
-+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# define EV_TYPE_COMPLEX complex(kind=C_FLOAT_COMPLEX) -+# define MATRIX_TYPE_COMPLEX complex(kind=C_FLOAT_COMPLEX) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define MATRIX_TYPE_COMPLEX complex(kind=C_DOUBLE_COMPLEX) -+# define EV_TYPE_COMPLEX complex(kind=C_DOUBLE_COMPLEX) -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use precision_for_tests -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a_skewsymmetric(:,:), as_skewsymmetric(:,:) -+ MATRIX_TYPE_COMPLEX, allocatable :: a_complex(:,:), as_complex(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z_skewsymmetric(:,:) -+ MATRIX_TYPE_COMPLEX, allocatable :: z_complex(:,:) -+ ! eigenvalues -+ EV_TYPE, allocatable :: ev_skewsymmetric(:), ev_complex(:) -+ -+ TEST_INT_TYPE :: status, i, j -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e_complex, e_skewsymmetric -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+! 
-+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, & -+ np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a_skewsymmetric (na_rows,na_cols)) -+ allocate(as_skewsymmetric(na_rows,na_cols)) -+ allocate(z_skewsymmetric (na_rows,2*na_cols)) -+ allocate(ev_skewsymmetric(na)) -+ -+ a_skewsymmetric(:,:) = 0.0 -+ z_skewsymmetric(:,:) = 0.0 -+ ev_skewsymmetric(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a_skewsymmetric, & -+ z_skewsymmetric(:,1:na_cols), as_skewsymmetric, is_skewsymmetric=1) -+ -+ !call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ as_skewsymmetric(:,:) = a_skewsymmetric(:,:) -+ -+ -+ ! prepare the complex matrix for the "brute force" case -+ allocate(a_complex (na_rows,na_cols)) -+ allocate(as_complex(na_rows,na_cols)) -+ allocate(z_complex (na_rows,na_cols)) -+ allocate(ev_complex(na)) -+ -+ a_complex(1:na_rows,1:na_cols) = 0.0 -+ z_complex(1:na_rows,1:na_cols) = 0.0 -+ as_complex(1:na_rows,1:na_cols) = 0.0 -+ -+ -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef TEST_DOUBLE -+ a_complex(i,j) = dcmplx(0.0, a_skewsymmetric(i,j)) -+#endif -+#ifdef TEST_SINGLE -+ a_complex(i,j) = cmplx(0.0, a_skewsymmetric(i,j)) -+#endif -+ enddo -+ enddo -+ -+ -+ -+ z_complex(1:na_rows,1:na_cols) = a_complex(1:na_rows,1:na_cols) -+ as_complex(1:na_rows,1:na_cols) = a_complex(1:na_rows,1:na_cols) -+ -+ ! first set up and solve the brute force problem -+ e_complex => elpa_allocate(error_elpa) -+ call set_basic_params(e_complex, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e_complex%set("timings",1, error_elpa) -+ -+ call e_complex%set("debug",1,error_elpa) -+ call e_complex%set("gpu", 0,error_elpa) -+ call e_complex%set("omp_threads", 8, error_elpa) -+ -+ assert_elpa_ok(e_complex%setup()) -+ call e_complex%set("solver", elpa_solver_2stage, error_elpa) -+ -+ call e_complex%timer_start("eigenvectors: brute force as complex matrix") -+ call e_complex%eigenvectors(a_complex, ev_complex, z_complex, error_elpa) -+ call e_complex%timer_stop("eigenvectors: brute force as complex matrix") -+ -+ if (myid .eq. 0) then -+ print *, "" -+ call e_complex%print_times("eigenvectors: brute force as complex matrix") -+ endif -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+! as_complex(:,:) = z_complex(:,:) -+#ifdef TEST_SINGLE -+ status = check_correctness_evp_numeric_residuals_complex_single(na, nev, as_complex, z_complex, ev_complex, sc_desc, & -+ nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#else -+ status = check_correctness_evp_numeric_residuals_complex_double(na, nev, as_complex, z_complex, ev_complex, sc_desc, & -+ nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#endif -+ status = 0 -+ call check_status(status, myid) -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ ! 
now run the skewsymmetric case -+ e_skewsymmetric => elpa_allocate(error_elpa) -+ call set_basic_params(e_skewsymmetric, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e_skewsymmetric%set("timings",1, error_elpa) -+ -+ call e_skewsymmetric%set("debug",1,error_elpa) -+ call e_skewsymmetric%set("gpu", 0,error_elpa) -+ call e_skewsymmetric%set("omp_threads",8, error_elpa) -+ -+ assert_elpa_ok(e_skewsymmetric%setup()) -+ -+ call e_skewsymmetric%set("solver", elpa_solver_2stage, error_elpa) -+ -+ call e_skewsymmetric%timer_start("eigenvectors: skewsymmetric ") -+ call e_skewsymmetric%skew_eigenvectors(a_skewsymmetric, ev_skewsymmetric, z_skewsymmetric, error_elpa) -+ call e_skewsymmetric%timer_stop("eigenvectors: skewsymmetric ") -+ -+ if (myid .eq. 0) then -+ print *, "" -+ call e_skewsymmetric%print_times("eigenvectors: skewsymmetric") -+ endif -+ -+ ! check eigenvalues -+ do i=1, na -+ if (myid == 0) then -+#ifdef TEST_DOUBLE -+ if (abs(ev_complex(i)-ev_skewsymmetric(i))/abs(ev_complex(i)) .gt. 1e-10) then -+#endif -+#ifdef TEST_SINGLE -+ if (abs(ev_complex(i)-ev_skewsymmetric(i))/abs(ev_complex(i)) .gt. 1e-4) then -+#endif -+ print *,"ev: i=",i,ev_complex(i),ev_skewsymmetric(i) -+ status = 1 -+ endif -+ endif -+ enddo -+ -+ -+! call check_status(status, myid) -+ -+ z_complex(:,:) = 0 -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef TEST_DOUBLE -+ z_complex(i,j) = dcmplx(z_skewsymmetric(i,j), z_skewsymmetric(i,na_cols+j)) -+#endif -+#ifdef TEST_SINGLE -+ z_complex(i,j) = cmplx(z_skewsymmetric(i,j), z_skewsymmetric(i,na_cols+j)) -+#endif -+ enddo -+ enddo -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ -+#ifdef TEST_SINGLE -+ status = check_correctness_evp_numeric_residuals_ss_real_single(na, nev, as_skewsymmetric, z_complex, ev_skewsymmetric, & -+ sc_desc, nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#else -+ status = check_correctness_evp_numeric_residuals_ss_real_double(na, nev, as_skewsymmetric, z_complex, ev_skewsymmetric, & -+ sc_desc, nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#endif -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call elpa_deallocate(e_complex,error_elpa) -+ call elpa_deallocate(e_skewsymmetric,error_elpa) -+ -+ -+ !to do -+ ! - check whether brute-force check_correctness_evp_numeric_residuals worsk (complex ev) -+ ! 
- invent a test for skewsymmetric residuals -+ -+ deallocate(a_complex) -+ deallocate(as_complex) -+ deallocate(z_complex) -+ deallocate(ev_complex) -+ -+ deallocate(a_skewsymmetric) -+ deallocate(as_skewsymmetric) -+ deallocate(z_skewsymmetric) -+ deallocate(ev_skewsymmetric) -+ call elpa_uninit(error_elpa) -+ -+ -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol -+ -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call elpa%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ subroutine check_status(status, myid) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: status, myid -+ TEST_INT_MPI_TYPE :: mpierr -+ if (status /= 0) then -+ if (myid == 0) print *, "Result incorrect!" -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ endif -+ end subroutine -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test_split_comm.F90 elpa-2020.11.001_ok/examples/Fortran/test_split_comm.F90 ---- elpa-2020.11.001/examples/Fortran/test_split_comm.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_split_comm.F90 2021-02-02 12:54:41.854142000 +0100 -@@ -0,0 +1,340 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! 
ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ TEST_INT_TYPE :: num_groups, group_size, color, key -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr, ierr,mpi_sub_commMPI, myidMPI, nprocsMPI, colorMPI, keyMPI, & -+ myid_subMPI, nprocs_subMPI -+ TEST_INT_TYPE :: mpi_sub_comm -+ TEST_INT_TYPE :: myid_sub, nprocs_sub -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ -+ status = 0 -+#ifdef WITH_MPI -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !call setup_mpi(myid, nprocs) -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world, myidMPI,mpierr) -+ call mpi_comm_size(mpi_comm_world, nprocsMPI,mpierr) -+ myid = int(myidMPI,kind=BLAS_KIND) -+ nprocs = int(nprocsMPI,kind=BLAS_KIND) -+ -+ if((mod(nprocs, 4) == 0) .and. (nprocs > 4)) then -+ num_groups = 4 -+ else if(mod(nprocs, 3) == 0) then -+ num_groups = 3 -+ else if(mod(nprocs, 2) == 0) then -+ num_groups = 2 -+ else -+ num_groups = 1 -+ endif -+ -+ group_size = nprocs / num_groups -+ -+ if(num_groups * group_size .ne. nprocs) then -+ print *, "Something went wrong before splitting the communicator" -+ stop 1 -+ else -+ if(myid == 0) then -+ print '((a,i0,a,i0))', "The test will split the global communicator into ", num_groups, " groups of size ", group_size -+ endif -+ endif -+ -+ ! each group of processors will have the same color -+ color = mod(myid, num_groups) -+ ! this will determine the myid in each group -+ key = myid/num_groups -+ !split the communicator -+ colorMPI=int(color,kind=MPI_KIND) -+ keyMPI = int(key, kind=MPI_KIND) -+ call mpi_comm_split(mpi_comm_world, colorMPI, keyMPI, mpi_sub_commMPI, mpierr) -+ mpi_sub_comm = int(mpi_sub_commMPI,kind=BLAS_KIND) -+ color = int(colorMPI,kind=BLAS_KIND) -+ key = int(keyMPI,kind=BLAS_KIND) -+ if(mpierr .ne. MPI_SUCCESS) then -+ print *, "communicator splitting not successfull", mpierr -+ stop 1 -+ endif -+ -+ call mpi_comm_rank(mpi_sub_commMPI, myid_subMPI, mpierr) -+ call mpi_comm_size(mpi_sub_commMPI, nprocs_subMPI, mpierr) -+ myid_sub = int(myid_subMPI,kind=BLAS_KIND) -+ nprocs_sub = int(nprocs_subMPI,kind=BLAS_KIND) -+ -+ !print *, "glob ", myid, nprocs, ", loc ", myid_sub, nprocs_sub, ", color ", color, ", key ", key -+ -+ if((mpierr .ne. MPI_SUCCESS) .or. (nprocs_sub .ne. group_size) .or. (myid_sub >= group_size)) then -+ print *, "something wrong with the sub communicators" -+ stop 1 -+ endif -+ -+ -+#ifdef HAVE_REDIRECT -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs_sub))),2,-1 -+ if(mod(nprocs_sub,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs_sub/np_cols -+ assert(nprocs_sub == np_rows * np_cols) -+ assert(nprocs == np_rows * np_cols * num_groups) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+ print '(a)', 'Process layout: ' // layout -+ print *,'' -+ endif -+ if (myid_sub == 0) then -+ print '(4(a,i0))','GROUP ', color, ': Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs_sub -+ endif -+ -+ ! 
USING the subcommunicator -+ call set_up_blacsgrid(int(mpi_sub_comm,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ !call prepare_matrix_analytic(na, a, nblk, myid_sub, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ call prepare_matrix_random(na, myid_sub, sc_desc, a, z, as) -+ as(:,:) = a(:,:) -+ -+ e => elpa_allocate(error_elpa) -+ call set_basic_params(e, na, nev, na_rows, na_cols, mpi_sub_comm, my_prow, my_pcol) -+ -+ call e%set("timings",1, error_elpa) -+ -+ call e%set("debug",1, error_elpa) -+ call e%set("gpu", 0, error_elpa) -+ !call e%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e%setup()) -+ -+ -+ -+! if(myid == 0) print *, "parameters of e" -+! call e%print_all_parameters() -+! if(myid == 0) print *, "" -+ -+ -+ call e%timer_start("eigenvectors") -+ call e%eigenvectors(a, ev, z, error_elpa) -+ call e%timer_stop("eigenvectors") -+ -+ assert_elpa_ok(error_elpa) -+ -+ !status = check_correctness_analytic(na, nev, ev, z, nblk, myid_sub, np_rows, np_cols, my_prow, my_pcol, & -+ ! .true., .true., print_times=.false.) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid_sub, & -+ np_rows,np_cols, my_prow, my_pcol) -+ if (status /= 0) & -+ print *, "processor ", myid, ": Result incorrect for processor group ", color -+ -+ if (myid .eq. 0) then -+ print *, "Showing times of one goup only" -+ call e%print_times("eigenvectors") -+ endif -+ -+ call elpa_deallocate(e, error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+#endif -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, communicator, my_prow, my_pcol) -+ use iso_c_binding -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol, communicator -+ -+#ifdef WITH_MPI -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa%set("mpi_comm_parent", int(communicator,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.11.001/examples/Makefile_hybrid elpa-2020.11.001_ok/examples/Makefile_hybrid ---- elpa-2020.11.001/examples/Makefile_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Makefile_hybrid 2021-07-02 10:19:18.864600000 +0200 -@@ -0,0 +1,24 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB 
= -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+# GCC -+# F90 = mpif90 -O3 -fopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+LIBS = -L$(ELPA_LIB_OPENMP) -lelpa_openmp -lelpatest_openmp -lelpa $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -qopenmp -+# GCC -+# CC = mpicc -O3 -fopenmp -+ -+all: test_real_e1_omp test_real_e2_omp -+ -+test_real_e1_omp: test_real_e1.F90 -+ $(F90) -DWITH_OPENMP -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2_omp: test_real_e2.F90 -+ $(F90) -DWITH_OPENMP -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Makefile_pure elpa-2020.11.001_ok/examples/Makefile_pure ---- elpa-2020.11.001/examples/Makefile_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Makefile_pure 2021-07-02 10:19:26.723701000 +0200 -@@ -0,0 +1,20 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_e1 test_real_e2 -+ -+test_real_e1: test_real_e1.F90 -+ $(F90) -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2: test_real_e2.F90 -+ $(F90) -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Makefile_pure_cuda elpa-2020.11.001_ok/examples/Makefile_pure_cuda ---- elpa-2020.11.001/examples/Makefile_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Makefile_pure_cuda 2021-07-02 10:19:34.549476000 +0200 -@@ -0,0 +1,20 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcudart -+CC = mpicc -O3 -+ -+all: test_real_e1 test_real_e2 -+ -+test_real_e1: test_real_e1.F90 -+ $(F90) -DCUDA -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2: test_real_e2.F90 -+ $(F90) -DCUDA -DCUDAKERNEL -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/shared/mod_tests_blas_interfaces.F90 elpa-2020.11.001_ok/examples/shared/mod_tests_blas_interfaces.F90 ---- elpa-2020.11.001/examples/shared/mod_tests_blas_interfaces.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/mod_tests_blas_interfaces.F90 2021-02-02 12:54:50.045819000 +0100 -@@ -0,0 +1,53 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! 
consisting of the following organizations: -+! -+! - Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! https://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! This file was written by A. Marek, MPCDF -+ -+#include "config-f90.h" -+#define PRECISION_MODULE precision_for_tests -+module tests_blas_interfaces -+ use iso_c_binding -+ use precision_for_tests -+ -+ implicit none -+ -+#include "../../src/helpers/fortran_blas_interfaces.F90" -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/mod_tests_scalapack_interfaces.F90 elpa-2020.11.001_ok/examples/shared/mod_tests_scalapack_interfaces.F90 ---- elpa-2020.11.001/examples/shared/mod_tests_scalapack_interfaces.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/mod_tests_scalapack_interfaces.F90 2021-02-02 12:54:50.050096000 +0100 -@@ -0,0 +1,56 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! https://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! 
You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! This file was written by A. Marek, MPCDF -+ -+ -+#include "config-f90.h" -+#define PRECISION_MODULE precision_for_tests -+module tests_scalapack_interfaces -+ use iso_c_binding -+ use precision_for_tests -+ -+ implicit none -+ -+#include "../../src/helpers/fortran_scalapack_interfaces.F90" -+ -+end module -+ -+ -diff -ruN elpa-2020.11.001/examples/shared/test_analytic.F90 elpa-2020.11.001_ok/examples/shared/test_analytic.F90 ---- elpa-2020.11.001/examples/shared/test_analytic.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_analytic.F90 2021-02-02 12:54:50.050664000 +0100 -@@ -0,0 +1,190 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. 
-+ -+#include "../Fortran/assert.h" -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+module test_analytic -+ -+ use test_util -+#ifdef HAVE_DETAILED_TIMINGS -+ use ftimings -+#else -+ use timings_dummy -+#endif -+ use precision_for_tests -+ -+ interface prepare_matrix_analytic -+ module procedure prepare_matrix_analytic_complex_double -+ module procedure prepare_matrix_analytic_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_analytic_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_analytic_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_analytic -+ module procedure check_correctness_analytic_complex_double -+ module procedure check_correctness_analytic_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_analytic_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_analytic_complex_single -+#endif -+ end interface -+ -+ -+ interface print_matrix -+ module procedure print_matrix_complex_double -+ module procedure print_matrix_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure print_matrix_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure print_matrix_complex_single -+#endif -+ end interface -+ -+ TEST_INT_TYPE, parameter, private :: num_primes = 3 -+ TEST_INT_TYPE, parameter, private :: primes(num_primes) = (/2,3,5/) -+ -+ TEST_INT_TYPE, parameter, private :: ANALYTIC_MATRIX = 0 -+ TEST_INT_TYPE, parameter, private :: ANALYTIC_EIGENVECTORS = 1 -+ TEST_INT_TYPE, parameter, private :: ANALYTIC_EIGENVALUES = 2 -+ -+ contains -+ -+ function decompose(num, decomposition) result(possible) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: num -+ TEST_INT_TYPE, intent(out) :: decomposition(num_primes) -+ logical :: possible -+ TEST_INT_TYPE :: reminder, prime, prime_id -+ -+ decomposition = 0 -+ possible = .true. -+ reminder = num -+ do prime_id = 1, num_primes -+ prime = primes(prime_id) -+ do while (MOD(reminder, prime) == 0) -+ decomposition(prime_id) = decomposition(prime_id) + 1 -+ reminder = reminder / prime -+ end do -+ end do -+ if(reminder > 1) then -+ possible = .false. 
-+ end if -+ end function -+ -+ function compose(decomposition) result(num) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: decomposition(num_primes) -+ TEST_INT_TYPE :: num, prime_id -+ -+ num = 1; -+ do prime_id = 1, num_primes -+ num = num * primes(prime_id) ** decomposition(prime_id) -+ end do -+ end function -+ -+ -+#include "../../src/general/prow_pcol.F90" -+#include "../../src/general/map_global_to_local.F90" -+ -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+ -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef DOUBLE_PRECISION -+#undef REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_analytic_template.F90 elpa-2020.11.001_ok/examples/shared/test_analytic_template.F90 ---- elpa-2020.11.001/examples/shared/test_analytic_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_analytic_template.F90 2021-02-02 12:54:50.046589000 +0100 -@@ -0,0 +1,667 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! 
the original distribution, the GNU Lesser General Public License. -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+ -+ subroutine prepare_matrix_analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, nblk, myid, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=REAL_DATATYPE), intent(inout):: a(:,:) -+ logical, optional :: print_times -+ logical :: print_timer -+ TEST_INT_TYPE :: globI, globJ, locI, locJ, pi, pj, levels(num_primes) -+ integer(kind=c_int) :: loc_I, loc_J, p_i, p_j -+#ifdef HAVE_DETAILED_TIMINGS -+ type(timer_t) :: timer -+#else -+ type(timer_dummy_t) :: timer -+#endif -+ -+ call timer%enable() -+ call timer%start("prepare_matrix_analytic") -+ -+ print_timer = .true. -+ -+ if (present(print_times)) then -+ print_timer = print_times -+ endif -+ -+ ! for debug only, do it systematicaly somehow ... unit tests -+ call check_module_sanity_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid) -+ -+ if(.not. decompose(na, levels)) then -+ if(myid == 0) then -+ print *, "Analytic test can be run only with matrix sizes of the form 2^n * 3^m * 5^o" -+ stop 1 -+ end if -+ end if -+ -+ call timer%start("loop") -+ do globI = 1, na -+ -+ p_i = prow(int(globI,kind=c_int), int(nblk,kind=c_int), int(np_rows,kind=c_int)) -+ pi = int(p_i,kind=INT_TYPE) -+ if (my_prow .ne. pi) cycle -+ -+ do globJ = 1, na -+ -+ p_j = pcol(int(globJ,kind=c_int), int(nblk,kind=c_int), int(np_cols,kind=c_int)) -+ pj = int(p_j,kind=INT_TYPE) -+ if (my_pcol .ne. pj) cycle -+ -+ if(map_global_array_index_to_local_index(int(globI,kind=c_int), int(globJ,kind=c_int), loc_I, loc_J, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ locI = int(loc_i,kind=INT_TYPE) -+ locJ = int(loc_j,kind=INT_TYPE) -+ call timer%start("evaluation") -+ a(locI, locJ) = analytic_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, globI, globJ) -+ call timer%stop("evaluation") -+ else -+ print *, "Warning ... error in preparation loop of the analytic test" -+ end if -+ end do -+ end do -+ call timer%stop("loop") -+ -+ call timer%stop("prepare_matrix_analytic") -+ if(myid == 0 .and. 
print_timer) then -+ call timer%print("prepare_matrix_analytic") -+ end if -+ call timer%free() -+ end subroutine -+ -+ function check_correctness_analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, check_all_evals, & -+ check_eigenvectors, print_times) result(status) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: na, nev, nblk, myid, np_rows, & -+ np_cols, my_prow, my_pcol -+ TEST_INT_TYPE :: status -+ TEST_INT_MPI_TYPE :: mpierr -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:) -+ real(kind=rk), intent(inout) :: ev(:) -+ logical, intent(in) :: check_all_evals, check_eigenvectors -+ -+ TEST_INT_TYPE :: globI, globJ, locI, locJ, & -+ levels(num_primes) -+ integer(kind=c_int) :: loc_I, loc_J -+ real(kind=rk) :: diff, max_z_diff, max_ev_diff, & -+ glob_max_z_diff, max_curr_z_diff -+#ifdef DOUBLE_PRECISION -+ real(kind=rk), parameter :: tol_eigenvalues = 5e-14_rk8 -+ real(kind=rk), parameter :: tol_eigenvectors = 6e-11_rk8 -+#endif -+#ifdef SINGLE_PRECISION -+ ! tolerance needs to be very high due to qr tests -+ ! it should be distinguished somehow! -+ real(kind=rk), parameter :: tol_eigenvalues = 7e-6_rk4 -+ real(kind=rk), parameter :: tol_eigenvectors = 4e-3_rk4 -+#endif -+ real(kind=rk) :: computed_ev, expected_ev -+ MATH_DATATYPE(kind=rck) :: computed_z, expected_z -+ -+ MATH_DATATYPE(kind=rck) :: max_value_for_normalization, & -+ computed_z_on_max_position, & -+ normalization_quotient -+ MATH_DATATYPE(kind=rck) :: max_values_array(np_rows * np_cols), & -+ corresponding_exact_value -+ integer(kind=c_int) :: max_value_idx, rank_with_max, & -+ rank_with_max_reduced, & -+ num_checked_evals -+ integer(kind=c_int) :: max_idx_array(np_rows * np_cols), & -+ rank -+ logical, optional :: print_times -+ logical :: print_timer -+ -+#ifdef HAVE_DETAILED_TIMINGS -+ type(timer_t) :: timer -+#else -+ type(timer_dummy_t) :: timer -+#endif -+ -+ call timer%enable() -+ call timer%start("check_correctness_analytic") -+ -+ -+ print_timer = .true. -+ if (present(print_times)) then -+ print_timer = print_times -+ endif -+ -+ if(.not. decompose(na, levels)) then -+ print *, "can not decomopse matrix size" -+ stop 1 -+ end if -+ -+ if(check_all_evals) then -+ num_checked_evals = na -+ else -+ num_checked_evals = nev -+ endif -+ !call print_matrix(myid, na, z, "z") -+ max_z_diff = 0.0_rk -+ max_ev_diff = 0.0_rk -+ call timer%start("loop_eigenvalues") -+ do globJ = 1, num_checked_evals -+ computed_ev = ev(globJ) -+ call timer%start("evaluation") -+ expected_ev = analytic_eigenvalues_real_& -+ &PRECISION& -+ &(na, globJ) -+ call timer%stop("evaluation") -+ diff = abs(computed_ev - expected_ev) -+ max_ev_diff = max(diff, max_ev_diff) -+ end do -+ call timer%stop("loop_eigenvalues") -+ -+ call timer%start("loop_eigenvectors") -+ do globJ = 1, nev -+ max_curr_z_diff = 0.0_rk -+ -+ ! eigenvectors are unique up to multiplication by scalar (complex in complex case) -+ ! to be able to compare them with analytic, we have to normalize them somehow -+ ! we will find a value in computed eigenvector with highest absolut value and enforce -+ ! such multiple of computed eigenvector, that the value on corresponding position is the same -+ ! as an corresponding value in the analytical eigenvector -+ -+ ! 
find the maximal value in the local part of given eigenvector (with index globJ) -+ max_value_for_normalization = 0.0_rk -+ max_value_idx = -1 -+ do globI = 1, na -+ if(map_global_array_index_to_local_index(int(globI,kind=c_int), int(globJ,kind=c_int), loc_I, loc_J, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ locI = int(loc_I,kind=INT_TYPE) -+ locJ = int(loc_J,kind=INT_TYPE) -+ computed_z = z(locI, locJ) -+ if(abs(computed_z) > abs(max_value_for_normalization)) then -+ max_value_for_normalization = computed_z -+ max_value_idx = int(globI,kind=c_int) -+ end if -+ end if -+ end do -+ -+ ! find the global maximum and its position. From technical reasons (looking for a -+ ! maximum of complex number), it is not so easy to do it nicely. Therefore we -+ ! communicate local maxima to mpi rank 0 and resolve there. If we wanted to do -+ ! it without this, it would be tricky.. question of uniquness - two complex numbers -+ ! with the same absolut values, but completely different... -+#ifdef WITH_MPI -+ call MPI_Gather(max_value_for_normalization, 1_MPI_KIND, MPI_MATH_DATATYPE_PRECISION, & -+ max_values_array, 1_MPI_KIND, MPI_MATH_DATATYPE_PRECISION, 0_MPI_KIND, & -+ int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+ call MPI_Gather(max_value_idx, 1_MPI_KIND, MPI_INT, max_idx_array, 1_MPI_KIND, MPI_INT, & -+ 0_MPI_KIND, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+ max_value_for_normalization = 0.0_rk -+ max_value_idx = -1 -+ do rank = 1, np_cols * np_rows -+ if(abs(max_values_array(rank)) > abs(max_value_for_normalization)) then -+ max_value_for_normalization = max_values_array(rank) -+ max_value_idx = max_idx_array(rank) -+ end if -+ end do -+ call MPI_Bcast(max_value_for_normalization, 1_MPI_KIND, MPI_MATH_DATATYPE_PRECISION, & -+ 0_MPI_KIND, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+ call MPI_Bcast(max_value_idx, 1_MPI_KIND, MPI_INT, 0_MPI_KIND, & -+ int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#endif -+ ! we decided what the maximum computed value is. Calculate expected value on the same -+ if(abs(max_value_for_normalization) < 0.0001_rk) then -+ if(myid == 0) print *, 'Maximal value in eigenvector too small :', max_value_for_normalization -+ status =1 -+ return -+ end if -+ call timer%start("evaluation_helper") -+ corresponding_exact_value = analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, int(max_value_idx,kind=INT_TYPE), globJ) -+ call timer%stop("evaluation_helper") -+ normalization_quotient = corresponding_exact_value / max_value_for_normalization -+ ! write(*,*) "normalization q", normalization_quotient -+ -+ ! compare computed and expected eigenvector values, but take into account normalization quotient -+ do globI = 1, na -+ if(map_global_array_index_to_local_index(int(globI,kind=c_int), int(globJ,kind=c_int), loc_I, loc_J, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ locI = int(loc_I,kind=INT_TYPE) -+ locJ = int(loc_J,kind=INT_TYPE) -+ computed_z = z(locI, locJ) -+ call timer%start("evaluation") -+ expected_z = analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, globI, globJ) -+ call timer%stop("evaluation") -+ max_curr_z_diff = max(abs(normalization_quotient * computed_z - expected_z), max_curr_z_diff) -+ end if -+ end do -+ ! 
we have max difference of one of the eigenvectors, update global -+ max_z_diff = max(max_z_diff, max_curr_z_diff) -+ end do !globJ -+ call timer%stop("loop_eigenvectors") -+ -+#ifdef WITH_MPI -+ call mpi_allreduce(max_z_diff, glob_max_z_diff, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, & -+ int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else -+ glob_max_z_diff = max_z_diff -+#endif -+ if(myid == 0) print *, 'Maximum error in eigenvalues :', max_ev_diff -+ if (check_eigenvectors) then -+ if(myid == 0) print *, 'Maximum error in eigenvectors :', glob_max_z_diff -+ endif -+ -+ status = 0 -+ if (nev .gt. 2) then -+ if (max_ev_diff .gt. tol_eigenvalues .or. max_ev_diff .eq. 0.0_rk) status = 1 -+ if (check_eigenvectors) then -+ if (glob_max_z_diff .gt. tol_eigenvectors .or. glob_max_z_diff .eq. 0.0_rk) status = 1 -+ endif -+ else -+ if (max_ev_diff .gt. tol_eigenvalues) status = 1 -+ if (check_eigenvectors) then -+ if (glob_max_z_diff .gt. tol_eigenvectors) status = 1 -+ endif -+ endif -+ -+ call timer%stop("check_correctness_analytic") -+ if(myid == 0 .and. print_timer) then -+ call timer%print("check_correctness_analytic") -+ end if -+ call timer%free() -+ end function -+ -+ -+ function analytic_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) result(element) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, i, j -+ MATH_DATATYPE(kind=REAL_DATATYPE) :: element -+ -+ element = analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, ANALYTIC_MATRIX) -+ -+ end function -+ -+ function analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) result(element) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, i, j -+ MATH_DATATYPE(kind=REAL_DATATYPE) :: element -+ -+ element = analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, ANALYTIC_EIGENVECTORS) -+ -+ end function -+ -+ function analytic_eigenvalues_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i) result(element) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, i -+ real(kind=REAL_DATATYPE) :: element -+ -+ element = analytic_real_& -+ &PRECISION& -+ &(na, i, i, ANALYTIC_EIGENVALUES) -+ -+ end function -+ -+ function analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, what) result(element) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: na, i, j, what -+ MATH_DATATYPE(kind=rck) :: element, mat2x2(2,2), mat(5,5) -+ real(kind=rk) :: a, am, amp -+ TEST_INT_TYPE :: levels(num_primes) -+ TEST_INT_TYPE :: ii, jj, m, prime_id, prime, total_level, level -+ -+ real(kind=rk), parameter :: s = 0.5_rk -+ real(kind=rk), parameter :: c = 0.86602540378443864679_rk -+ real(kind=rk), parameter :: sq2 = 1.4142135623730950488_rk -+ -+ real(kind=rk), parameter :: largest_ev = 2.0_rk -+ -+ assert(i <= na) -+ assert(j <= na) -+ assert(i >= 0) -+ assert(j >= 0) -+ assert(decompose(na, levels)) -+ ! go to zero-based indexing -+ ii = i - 1 -+ jj = j - 1 -+ if (na .gt. 
2) then -+ a = exp(log(largest_ev)/(na-1)) -+ else -+ a = exp(log(largest_ev)/(1)) -+ endif -+ -+ element = 1.0_rck -+#ifdef COMPLEXCASE -+ element = (1.0_rk, 0.0_rk) -+#endif -+ total_level = 0 -+ am = a -+ do prime_id = 1,num_primes -+ prime = primes(prime_id) -+ do level = 1, levels(prime_id) -+ amp = am**(prime-1) -+ total_level = total_level + 1 -+ if(what == ANALYTIC_MATRIX) then -+#ifdef REALCASE -+ mat2x2 = reshape((/ c*c + amp * s*s, (amp - 1.0_rk) * s*c, & -+ (amp - 1.0_rk) * s*c, s*s + amp * c*c /), & -+ (/2, 2/), order=(/2,1/)) -+#endif -+#ifdef COMPLEXCASE -+ mat2x2 = reshape((/ 0.5_rck * (amp + 1.0_rck) * (1.0_rk, 0.0_rk), sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, 1.0_rk), & -+ sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, -1.0_rk), 0.5_rck * (amp + 1.0_rck) * (1.0_rk, 0.0_rk) /), & -+ (/2, 2/), order=(/2,1/)) -+! intel 2018 does not reshape correctly (one would have to specify order=(/1,2/) -+! until this is resolved, I resorted to the following -+ mat2x2(1,2) = sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, 1.0_rk) -+ mat2x2(2,1) = sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, -1.0_rk) -+#endif -+ else if(what == ANALYTIC_EIGENVECTORS) then -+#ifdef REALCASE -+ mat2x2 = reshape((/ c, s, & -+ -s, c /), & -+ (/2, 2/), order=(/2,1/)) -+! intel 2018 does not reshape correctly (one would have to specify order=(/1,2/) -+! until this is resolved, I resorted to the following -+ mat2x2(1,2) = s -+ mat2x2(2,1) = -s -+#endif -+#ifdef COMPLEXCASE -+ mat2x2 = reshape((/ -sq2/2.0_rck * (1.0_rk, 0.0_rk), -sq2/2.0_rck * (1.0_rk, 0.0_rk), & -+ 0.5_rk * (1.0_rk, -1.0_rk), 0.5_rk * (-1.0_rk, 1.0_rk) /), & -+ (/2, 2/), order=(/2,1/)) -+! intel 2018 does not reshape correctly (one would have to specify order=(/1,2/) -+! until this is resolved, I resorted to the following -+ mat2x2(1,2) = -sq2/2.0_rck * (1.0_rk, 0.0_rk) -+ mat2x2(2,1) = 0.5_rk * (1.0_rk, -1.0_rk) -+#endif -+ else if(what == ANALYTIC_EIGENVALUES) then -+ mat2x2 = reshape((/ 1.0_rck, 0.0_rck, & -+ 0.0_rck, amp /), & -+ (/2, 2/), order=(/2,1/)) -+ else -+ assert(.false.) -+ end if -+ -+ mat = 0.0_rck -+ if(prime == 2) then -+ mat(1:2, 1:2) = mat2x2 -+ else if(prime == 3) then -+ mat((/1,3/),(/1,3/)) = mat2x2 -+ if(what == ANALYTIC_EIGENVECTORS) then -+ mat(2,2) = 1.0_rck -+ else -+ mat(2,2) = am -+ end if -+ else if(prime == 5) then -+ mat((/1,5/),(/1,5/)) = mat2x2 -+ if(what == ANALYTIC_EIGENVECTORS) then -+ mat(2,2) = 1.0_rck -+ mat(3,3) = 1.0_rck -+ mat(4,4) = 1.0_rck -+ else -+ mat(2,2) = am -+ mat(3,3) = am**2 -+ mat(4,4) = am**3 -+ end if -+ else -+ assert(.false.) -+ end if -+ -+ ! write(*,*) "calc value, elem: ", element, ", mat: ", mod(ii,2), mod(jj,2), mat(mod(ii,2), mod(jj,2)), "am ", am -+ ! write(*,*) " matrix mat", mat -+ element = element * mat(mod(ii,prime) + 1, mod(jj,prime) + 1) -+ ii = ii / prime -+ jj = jj / prime -+ -+ am = am**prime -+ end do -+ end do -+ !write(*,*) "returning value ", element -+ end function -+ -+ -+ subroutine print_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid, na, mat, mat_name) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na -+ character(len=*), intent(in) :: mat_name -+ MATH_DATATYPE(kind=rck) :: mat(na, na) -+ TEST_INT_TYPE :: i,j -+ character(len=20) :: na_str -+ -+ if(myid .ne. 
0) & -+ return -+ write(*,*) "Matrix: "//trim(mat_name) -+ write(na_str, *) na -+ do i = 1, na -+#ifdef REALCASE -+ write(*, '('//trim(na_str)//'f8.3)') mat(i, :) -+#endif -+#ifdef COMPLEXCASE -+ write(*,'('//trim(na_str)//'(A,f8.3,A,f8.3,A))') ('(', real(mat(i,j)), ',', aimag(mat(i,j)), ')', j=1,na) -+#endif -+ end do -+ write(*,*) -+ end subroutine -+ -+ -+ subroutine check_matrices_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid, na) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na -+ MATH_DATATYPE(kind=rck) :: A(na, na), S(na, na), L(na, na), res(na, na) -+ TEST_INT_TYPE :: i, j, decomposition(num_primes) -+ -+ real(kind=rk) :: err -+#ifdef DOUBLE_PRECISION -+ real(kind=rk), parameter :: TOL = 1e-8 -+#endif -+#ifdef SINGLE_PRECISION -+ real(kind=rk), parameter :: TOL = 1e-4 -+#endif -+ -+ assert(decompose(na, decomposition)) -+ -+ do i = 1, na -+ do j = 1, na -+ A(i,j) = analytic_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) -+ S(i,j) = analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) -+ L(i,j) = analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, ANALYTIC_EIGENVALUES) -+ end do -+ end do -+ -+ res = matmul(A,S) - matmul(S,L) -+ err = maxval(abs(res)) -+ -+ if(err > TOL) then -+ print *, "WARNING: sanity test in module analytic failed, error is ", err -+ end if -+ -+ if(.false.) then -+ !if(na == 2 .or. na == 5) then -+ call print_matrix(myid, na, A, "A") -+ call print_matrix(myid, na, S, "S") -+ call print_matrix(myid, na, L, "L") -+ -+ call print_matrix(myid, na, matmul(A,S), "AS") -+ call print_matrix(myid, na, matmul(S,L), "SL") -+ -+ call print_matrix(myid, na, res , "res") -+ end if -+ -+ end subroutine -+ -+ subroutine check_module_sanity_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: myid -+ TEST_INT_TYPE :: decomposition(num_primes), i -+ TEST_INT_TYPE, parameter :: check_sizes(7) = (/2, 3, 5, 6, 10, 25, 150/) -+ if(myid == 0) print *, "Checking test_analytic module sanity.... " -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+ assert(decompose(1500_lik, decomposition)) -+#else -+ assert(decompose(1500_ik, decomposition)) -+#endif -+ assert(all(decomposition == (/2,1,3/))) -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+ assert(decompose(6_lik,decomposition)) -+#else -+ assert(decompose(6_ik,decomposition)) -+#endif -+ assert(all(decomposition == (/1,1,0/))) -+ -+ do i =1, size(check_sizes) -+ call check_matrices_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid, check_sizes(i)) -+ end do -+ -+ if(myid == 0) print *, "Checking test_analytic module sanity.... DONE" -+ -+ end subroutine -diff -ruN elpa-2020.11.001/examples/shared/test_blacs_infrastructure.F90 elpa-2020.11.001_ok/examples/shared/test_blacs_infrastructure.F90 ---- elpa-2020.11.001/examples/shared/test_blacs_infrastructure.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_blacs_infrastructure.F90 2021-02-02 12:54:50.044386000 +0100 -@@ -0,0 +1,208 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! 
- Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#define TEST_C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#define TEST_C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#endif -+ -+module test_blacs_infrastructure -+ -+ contains -+ -+ !c> void set_up_blacsgrid_f(TEST_C_INT_TYPE mpi_comm_parent, TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, char layout, -+ !c> TEST_C_INT_TYPE_PTR my_blacs_ctxt, TEST_C_INT_TYPE_PTR my_prow, -+ !c> TEST_C_INT_TYPE_PTR my_pcol); -+ subroutine set_up_blacsgrid(mpi_comm_parent, np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) bind(C, name="set_up_blacsgrid_f") -+ -+ use precision_for_tests -+ use test_util -+ use iso_c_binding -+ -+ implicit none -+ TEST_INT_TYPE, intent(in), value :: mpi_comm_parent, np_rows, np_cols -+#ifdef SXAURORA -+ character(len=1), intent(in) :: layout -+#else -+ character(kind=c_char), intent(in), value :: layout -+#endif -+ TEST_INT_TYPE, intent(out) :: my_blacs_ctxt, my_prow, my_pcol -+ -+#ifdef WITH_MPI -+ TEST_INT_TYPE :: np_rows_, np_cols_ -+#endif -+ -+ if (layout /= 'R' .and. 
layout /= 'C') then -+ print *, "layout must be 'R' or 'C'" -+ stop 1 -+ end if -+ -+ my_blacs_ctxt = mpi_comm_parent -+#ifdef WITH_MPI -+ call BLACS_Gridinit(my_blacs_ctxt, layout, np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, np_rows_, np_cols_, my_prow, my_pcol) -+ if (np_rows /= np_rows_) then -+ print *, "BLACS_Gridinfo returned different values for np_rows as set by BLACS_Gridinit" -+ stop 1 -+ endif -+ if (np_cols /= np_cols_) then -+ print *, "BLACS_Gridinfo returned different values for np_cols as set by BLACS_Gridinit" -+ stop 1 -+ endif -+#else -+ my_prow = 0 -+ my_pcol = 0 -+#endif -+ end subroutine -+ -+ subroutine set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, na_rows, & -+ na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ use elpa_utilities, only : error_unit -+ use test_util -+ use precision_for_tests -+ use tests_scalapack_interfaces -+ implicit none -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, my_prow, my_pcol, np_rows, & -+ np_cols, & -+ my_blacs_ctxt -+ TEST_INT_TYPE, intent(inout) :: info -+ TEST_INT_TYPE, intent(out) :: na_rows, na_cols, sc_desc(1:9) -+ -+#ifdef WITH_MPI -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ sc_desc(:) = 0 -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+ na_rows = numroc(na, nblk, my_prow, 0_BLAS_KIND, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0_BLAS_KIND, np_cols) -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0_BLAS_KIND, 0_BLAS_KIND, & -+ my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_unit,*) 'Error in BLACS descinit! info=',info -+ write(error_unit,*) 'Most likely this happend since you want to use' -+ write(error_unit,*) 'more MPI tasks than are possible for your' -+ write(error_unit,*) 'problem size (matrix size and blocksize)!' -+ write(error_unit,*) 'The blacsgrid can not be set up properly' -+ write(error_unit,*) 'Try reducing the number of MPI tasks...' 
-+ call MPI_ABORT(int(mpi_comm_world,kind=MPI_KIND), 1_MPI_KIND, mpierr) -+ endif -+#else /* WITH_MPI */ -+ na_rows = na -+ na_cols = na -+#endif /* WITH_MPI */ -+ -+ end subroutine -+ -+ !c> void set_up_blacs_descriptor_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nblk, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE_PTR na_rows, TEST_C_INT_TYPE_PTR na_cols, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE my_blacs_ctxt, -+ !c> TEST_C_INT_TYPE_PTR info); -+ subroutine set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, na_rows, & -+ na_cols, sc_desc, & -+ my_blacs_ctxt, info) & -+ bind(C, name="set_up_blacs_descriptor_f") -+ -+ use iso_c_binding -+ implicit none -+ -+ -+ TEST_INT_TYPE, value :: na, nblk, my_prow, my_pcol, np_rows, & -+ np_cols, my_blacs_ctxt -+ TEST_INT_TYPE :: na_rows, na_cols, info, sc_desc(1:9) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, na_rows, & -+ na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ -+ end subroutine -+ -+ -+ function index_l2g(idx_loc, nblk, iproc, nprocs) result(indexl2g) -+ use precision_for_tests -+ implicit none -+ TEST_INT_TYPE :: indexl2g -+ TEST_INT_TYPE :: idx_loc, nblk, iproc, nprocs -+ indexl2g = nprocs * nblk * ((idx_loc-1) / nblk) + mod(idx_loc-1,nblk) + mod(nprocs+iproc, nprocs)*nblk + 1 -+ return -+ end function -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_check_correctness.F90 elpa-2020.11.001_ok/examples/shared/test_check_correctness.F90 ---- elpa-2020.11.001/examples/shared/test_check_correctness.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_check_correctness.F90 2021-02-02 12:54:50.054408000 +0100 -@@ -0,0 +1,156 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! 
the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. Marek, MPCDF -+#include "config-f90.h" -+ -+module test_check_correctness -+ use test_util -+ -+ interface check_correctness_evp_numeric_residuals -+ module procedure check_correctness_evp_numeric_residuals_complex_double -+ module procedure check_correctness_evp_numeric_residuals_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_evp_numeric_residuals_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_evp_numeric_residuals_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_evp_numeric_residuals_ss -+! module procedure check_correctness_evp_numeric_residuals_ss_complex_double -+ module procedure check_correctness_evp_numeric_residuals_ss_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_evp_numeric_residuals_ss_real_single -+#endif -+! #ifdef WANT_SINGLE_PRECISION_COMPLEX -+! module procedure check_correctness_evp_numeric_residuals_ss_complex_single -+! #endif -+ end interface -+ -+ interface check_correctness_eigenvalues_toeplitz -+ module procedure check_correctness_eigenvalues_toeplitz_complex_double -+ module procedure check_correctness_eigenvalues_toeplitz_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_eigenvalues_toeplitz_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_eigenvalues_toeplitz_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_eigenvalues_frank -+ module procedure check_correctness_eigenvalues_frank_complex_double -+ module procedure check_correctness_eigenvalues_frank_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_eigenvalues_frank_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_eigenvalues_frank_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_cholesky -+ module procedure check_correctness_cholesky_complex_double -+ module procedure check_correctness_cholesky_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_cholesky_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_cholesky_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_hermitian_multiply -+ module procedure check_correctness_hermitian_multiply_complex_double -+ module procedure check_correctness_hermitian_multiply_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_hermitian_multiply_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_hermitian_multiply_complex_single -+#endif -+ end interface -+ -+ -+ contains -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef DOUBLE_PRECISION -+#undef 
REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+#include "../../src/general/prow_pcol.F90" -+#include "../../src/general/map_global_to_local.F90" -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_check_correctness_template.F90 elpa-2020.11.001_ok/examples/shared/test_check_correctness_template.F90 ---- elpa-2020.11.001/examples/shared/test_check_correctness_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_check_correctness_template.F90 2021-02-02 12:54:50.053371000 +0100 -@@ -0,0 +1,1134 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. 
Marek, MPCDF -+ -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE lik -+#define TEST_C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE ik -+#define TEST_C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE lik -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE ik -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#endif -+ -+#if REALCASE == 1 -+ function check_correctness_evp_numeric_residuals_ss_real_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) result(status) -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ use precision_for_tests -+ use iso_c_binding -+ implicit none -+#include "../../src/general/precision_kinds.F90" -+ integer(kind=BLAS_KIND) :: status, na_cols, na_rows -+ integer(kind=BLAS_KIND), intent(in) :: na, nev, nblk, myid, np_rows, np_cols, my_prow, my_pcol -+ real(kind=rk), intent(in) :: as(:,:) -+ real(kind=rk) :: tmpr -+ complex(kind=rck), intent(in) :: z(:,:) -+ real(kind=rk) :: ev(:) -+ complex(kind=rck), dimension(size(as,dim=1),size(as,dim=2)) :: tmp1, tmp2 -+ complex(kind=rck) :: xc -+ -+ complex(kind=rck), allocatable :: as_complex(:,:) -+ -+ integer(kind=BLAS_KIND) :: sc_desc(:) -+ -+ integer(kind=BLAS_KIND) :: i, j, rowLocal, colLocal -+ integer(kind=c_int) :: row_Local, col_Local -+ real(kind=rck) :: err, errmax -+ -+ integer :: mpierr -+ -+ ! tolerance for the residual test for different math type/precision setups -+ real(kind=rk), parameter :: tol_res_real_double = 5e-4_rk -+ real(kind=rk), parameter :: tol_res_real_single = 3e-2_rk -+ real(kind=rk), parameter :: tol_res_complex_double = 5e-12_rk -+ real(kind=rk), parameter :: tol_res_complex_single = 3e-2_rk -+ real(kind=rk) :: tol_res = tol_res_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ ! precision of generalized problem is lower -+ real(kind=rk), parameter :: generalized_penalty = 10.0_rk -+ -+ ! tolerance for the orthogonality test for different math type/precision setups -+! real(kind=rk), parameter :: tol_orth_real_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_real_double = 5e-4_rk -+ real(kind=rk), parameter :: tol_orth_real_single = 9e-2_rk -+ real(kind=rk), parameter :: tol_orth_complex_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_complex_single = 9e-3_rk -+ real(kind=rk), parameter :: tol_orth = tol_orth_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ -+ complex(kind=rck), parameter :: CZERO = (0.0_rck,0.0_rck), CONE = (1.0_rck,0.0_rck) -+ -+ -+ status = 0 -+ ! Setup complex matrices and eigenvalues -+ na_rows = size(as,dim=1) -+ na_cols = size(as,dim=2) -+ -+ allocate(as_complex(na_rows,na_cols)) -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef DOUBLE_PRECISION_REAL -+ as_complex(i,j) = dcmplx(as(i,j),0.0_rk) -+#else -+ as_complex(i,j) = cmplx(as(i,j),0.0_rk) -+#endif -+ enddo -+ enddo -+ -+ ! 1. Residual (maximum of || A*Zi - Zi*EVi ||) -+ -+ ! 
tmp1 = Zi*EVi -+ tmp1(:,:) = z(:,:) -+ do i=1,nev -+#ifdef DOUBLE_PRECISION_REAL -+ xc = dcmplx(0.0_rk,ev(i)) -+#else -+ xc = cmplx(0.0_rk,ev(i)) -+#endif -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call pzscal(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#else -+ call pcscal(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call zscal(int(na,kind=BLAS_KIND), xc, tmp1(:,i), 1_BLAS_KIND) -+#else -+ call cscal(int(na,kind=BLAS_KIND), xc, tmp1(:,i), 1_BLAS_KIND) -+#endif -+#endif /* WITH_MPI */ -+ enddo -+ -+ ! normal eigenvalue problem .. no need to multiply -+ tmp2(:,:) = tmp1(:,:) -+ -+ ! tmp1 = A * Z -+ ! as is original stored matrix, Z are the EVs -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZGEMM('N', 'N', int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, as_complex, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call PCGEMM('N', 'N', int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, as_complex, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call ZGEMM('N','N',int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), CONE, & -+ as_complex, int(na,kind=BLAS_KIND), z,int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND) ) -+#else -+ call CGEMM('N','N', int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), CONE, & -+ as_complex, int(na,kind=BLAS_KIND), z, int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND) ) -+#endif -+#endif /* WITH_MPI */ -+ -+ ! tmp1 = A*Zi - Zi*EVi -+ tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+ ! Get maximum norm of columns of tmp1 -+ errmax = 0.0_rk -+ -+ do i=1,nev -+ xc = (0.0_rk,0.0_rk) -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZDOTC(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, & -+ 1_BLAS_KIND, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#else -+ call PCDOTC(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, & -+ 1_BLAS_KIND, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ xc = ZDOTC(int(na,kind=BLAS_KIND) ,tmp1, 1_BLAS_KIND, tmp1, 1_BLAS_KIND) -+#else -+ xc = CDOTC(int(na,kind=BLAS_KIND) ,tmp1, 1_BLAS_KIND, tmp1, 1_BLAS_KIND) -+#endif -+#endif /* WITH_MPI */ -+ errmax = max(errmax, sqrt(real(xc,kind=REAL_DATATYPE))) -+ enddo -+ -+ ! Get maximum error norm over all processors -+ err = errmax -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'%Results of numerical residual checks, using complex arithmetic:' -+ if (myid==0) print *,'%Error Residual :',errmax -+ if (nev .ge. 2) then -+ if (errmax .gt. tol_res .or. errmax .eq. 0.0_rk) then -+ status = 1 -+ endif -+ else -+ if (errmax .gt. tol_res) then -+ status = 1 -+ endif -+ endif -+ -+ ! 2. 
Eigenvector orthogonality -+ tmp2(:,:) = z(:,:) -+ tmp1 = 0 -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZGEMM('C', 'N', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, z, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call PCGEMM('C', 'N', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, z, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#endif -+ -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call ZGEMM('C','N', int(nev,kind=BLAS_KIND) , int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND),CONE, z, & -+ int(na,kind=BLAS_KIND), tmp2, int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND)) -+#else -+ call CGEMM('C','N', int(nev,kind=BLAS_KIND) , int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND),CONE, z, & -+ int(na,kind=BLAS_KIND), tmp2, int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND)) -+#endif -+#endif /* WITH_MPI */ -+ ! First check, whether the elements on diagonal are 1 .. "normality" of the vectors -+ err = 0.0_rk -+ do i=1, nev -+ if (map_global_array_index_to_local_index(int(i,kind=c_int), int(i,kind=c_int), row_Local, col_Local, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int)) ) then -+ rowLocal = int(row_Local,kind=INT_TYPE) -+ colLocal = int(col_Local,kind=INT_TYPE) -+ err = max(err, abs(tmp1(rowLocal,colLocal) - CONE)) -+ endif -+ end do -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'%Maximal error in eigenvector lengths:',errmax -+ -+ ! Second, find the maximal error in the whole Z**T * Z matrix (its diference from identity matrix) -+ ! Initialize tmp2 to unit matrix -+ tmp2 = 0 -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZLASET('A', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), CZERO, CONE, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call PCLASET('A', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), CZERO, CONE, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call ZLASET('A',int(nev,kind=BLAS_KIND) ,int(nev,kind=BLAS_KIND) ,CZERO, CONE, tmp2, int(na,kind=BLAS_KIND)) -+#else -+ call CLASET('A',int(nev,kind=BLAS_KIND) ,int(nev,kind=BLAS_KIND) ,CZERO, CONE, tmp2, int(na,kind=BLAS_KIND)) -+#endif -+#endif /* WITH_MPI */ -+ -+ ! ! tmp1 = Z**T * Z - Unit Matrix -+ tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+ ! Get maximum error (max abs value in tmp1) -+ err = maxval(abs(tmp1)) -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'%Error Orthogonality:',errmax -+ -+ if (nev .ge. 2) then -+ if (errmax .gt. tol_orth .or. errmax .eq. 0.0_rk) then -+ status = 1 -+ endif -+ else -+ if (errmax .gt. 
tol_orth) then -+ status = 1 -+ endif -+ endif -+ -+ deallocate(as_complex) -+ end function -+ -+#endif /* REALCASE */ -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_ss_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> double *as, complex double *z, double *ev, TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_ss_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> float *as, complex float *z, float *ev, TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* REALCASE */ -+ -+#if REALCASE == 1 -+function check_correctness_evp_numeric_residuals_ss_real_& -+&PRECISION& -+&_f (na, nev, na_rows, na_cols, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) result(status) & -+ bind(C,name="check_correctness_evp_numeric_residuals_ss_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ -+ use precision_for_tests -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, value :: na, nev, myid, na_rows, na_cols, nblk, np_rows, np_cols, my_prow, my_pcol -+ real(kind=rck) :: as(1:na_rows,1:na_cols) -+ complex(kind=rck) :: z(1:na_rows,1:na_cols) -+ real(kind=rck) :: ev(1:na) -+ TEST_INT_TYPE :: sc_desc(1:9) -+ -+ status = check_correctness_evp_numeric_residuals_ss_real_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ end function -+#endif /* REALCASE */ -+ -+function check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs) result(status) -+ -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, intent(in) :: na, nev, nblk, myid, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck), intent(in) :: as(:,:), z(:,:) -+ MATH_DATATYPE(kind=rck), intent(in), optional :: bs(:,:) -+ real(kind=rk) :: ev(:) -+ MATH_DATATYPE(kind=rck), dimension(size(as,dim=1),size(as,dim=2)) :: tmp1, tmp2 -+ MATH_DATATYPE(kind=rck) :: xc -+ -+ TEST_INT_TYPE :: sc_desc(:) -+ -+ TEST_INT_TYPE :: i, rowLocal, colLocal -+ integer(kind=c_int) :: row_Local, col_Local -+ real(kind=rck) :: err, errmax -+ -+ TEST_INT_MPI_TYPE :: mpierr -+ -+! tolerance for the residual test for different math type/precision setups -+ real(kind=rk), parameter :: tol_res_real_double = 5e-12_rk -+ real(kind=rk), parameter :: tol_res_real_single = 3e-2_rk -+ real(kind=rk), parameter :: tol_res_complex_double = 5e-12_rk -+ real(kind=rk), parameter :: tol_res_complex_single = 3e-2_rk -+ real(kind=rk) :: tol_res = tol_res_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ ! precision of generalized problem is lower -+ real(kind=rk), parameter :: generalized_penalty = 10.0_rk -+ -+ ! 
tolerance for the orthogonality test for different math type/precision setups -+ real(kind=rk), parameter :: tol_orth_real_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_real_single = 9e-2_rk -+ real(kind=rk), parameter :: tol_orth_complex_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_complex_single = 9e-3_rk -+ real(kind=rk), parameter :: tol_orth = tol_orth_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ -+ if (present(bs)) then -+ tol_res = generalized_penalty * tol_res -+ endif -+ status = 0 -+ -+ ! 1. Residual (maximum of || A*Zi - Zi*EVi ||) -+ -+! tmp1 = Zi*EVi -+ tmp1(:,:) = z(:,:) -+ do i=1,nev -+ xc = ev(i) -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &scal(na, xc, tmp1, 1_BLAS_KIND, i, sc_desc, 1_BLAS_KIND) -+#else /* WITH_MPI */ -+ call BLAS_CHAR& -+ &scal(na, xc, tmp1(:,i), 1_BLAS_KIND) -+#endif /* WITH_MPI */ -+ enddo -+ -+ ! for generalized EV problem, multiply by bs as well -+ ! tmp2 = B * tmp1 -+ if(present(bs)) then -+#ifdef WITH_MPI -+ call scal_PRECISION_GEMM('N', 'N', na, nev, na, ONE, bs, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call PRECISION_GEMM('N','N',na,nev,na,ONE,bs,na,tmp1,na,ZERO,tmp2,na) -+#endif /* WITH_MPI */ -+ else -+ ! normal eigenvalue problem .. no need to multiply -+ tmp2(:,:) = tmp1(:,:) -+ end if -+ -+ ! tmp1 = A * Z -+ ! as is original stored matrix, Z are the EVs -+#ifdef WITH_MPI -+ call scal_PRECISION_GEMM('N', 'N', na, nev, na, ONE, as, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call PRECISION_GEMM('N','N',na,nev,na,ONE,as,na,z,na,ZERO,tmp1,na) -+#endif /* WITH_MPI */ -+ -+ ! tmp1 = A*Zi - Zi*EVi -+ tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+ ! Get maximum norm of columns of tmp1 -+ errmax = 0.0_rk -+ -+ do i=1,nev -+#if REALCASE == 1 -+ err = 0.0_rk -+#ifdef WITH_MPI -+ call scal_PRECISION_NRM2(na, err, tmp1, 1_BLAS_KIND, i, sc_desc, 1_BLAS_KIND) -+#else /* WITH_MPI */ -+ err = PRECISION_NRM2(na,tmp1(1,i),1_BLAS_KIND) -+#endif /* WITH_MPI */ -+ errmax = max(errmax, err) -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+ xc = 0 -+#ifdef WITH_MPI -+ call scal_PRECISION_DOTC(na, xc, tmp1, 1_BLAS_KIND, i, sc_desc, & -+ 1_BLAS_KIND, tmp1, 1_BLAS_KIND, i, sc_desc, 1_BLAS_KIND) -+#else /* WITH_MPI */ -+ xc = PRECISION_DOTC(na,tmp1,1_BLAS_KIND,tmp1,1_BLAS_KIND) -+#endif /* WITH_MPI */ -+ errmax = max(errmax, sqrt(real(xc,kind=REAL_DATATYPE))) -+#endif /* COMPLEXCASE */ -+ enddo -+ -+ ! Get maximum error norm over all processors -+ err = errmax -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'Results of numerical residual checks:' -+ if (myid==0) print *,'Error Residual :',errmax -+ if (nev .ge. 2) then -+ if (errmax .gt. tol_res .or. errmax .eq. 0.0_rk) then -+ status = 1 -+ endif -+ else -+ if (errmax .gt. tol_res) then -+ status = 1 -+ endif -+ endif -+ -+ ! 2. Eigenvector orthogonality -+ if(present(bs)) then -+ !for the generalized EVP, the eigenvectors should be B-orthogonal, not orthogonal -+ ! 
tmp2 = B * Z -+      tmp2(:,:) = 0.0_rck -+#ifdef WITH_MPI -+      call scal_PRECISION_GEMM('N', 'N', na, nev, na, ONE, bs, 1_BLAS_KIND, 1_BLAS_KIND, & -+                               sc_desc, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+      call PRECISION_GEMM('N','N', na, nev, na, ONE, bs, na, z, na, ZERO, tmp2, na) -+#endif /* WITH_MPI */ -+ -+    else -+      tmp2(:,:) = z(:,:) -+    endif -+    ! tmp1 = Z**T * tmp2 -+    ! actually tmp1 = Z**T * Z for standard case and tmp1 = Z**T * B * Z for generalized -+    tmp1 = 0 -+#ifdef WITH_MPI -+    call scal_PRECISION_GEMM(BLAS_TRANS_OR_CONJ, 'N', nev, nev, na, ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, & -+                             sc_desc, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, & -+                             tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+    call PRECISION_GEMM(BLAS_TRANS_OR_CONJ,'N',nev,nev,na,ONE,z,na,tmp2,na,ZERO,tmp1,na) -+#endif /* WITH_MPI */ -+    ! First check, whether the elements on diagonal are 1 .. "normality" of the vectors -+    err = 0.0_rk -+    do i=1, nev -+      if (map_global_array_index_to_local_index(int(i,kind=c_int), int(i,kind=c_int) , row_Local, col_Local, & -+                                                int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+                                                int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+                                                int(my_pcol,kind=c_int) )) then -+        rowLocal = int(row_Local,kind=INT_TYPE) -+        colLocal = int(col_Local,kind=INT_TYPE) -+        err = max(err, abs(tmp1(rowLocal,colLocal) - 1.0_rk)) -+      endif -+    end do -+#ifdef WITH_MPI -+    call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+    errmax = err -+#endif /* WITH_MPI */ -+    if (myid==0) print *,'Maximal error in eigenvector lengths:',errmax -+ -+    ! Second, find the maximal error in the whole Z**T * Z matrix (its difference from the identity matrix) -+    ! Initialize tmp2 to unit matrix -+    tmp2 = 0 -+#ifdef WITH_MPI -+    call scal_PRECISION_LASET('A', nev, nev, ZERO, ONE, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+    call PRECISION_LASET('A',nev,nev,ZERO,ONE,tmp2,na) -+#endif /* WITH_MPI */ -+ -+    ! ! tmp1 = Z**T * Z - Unit Matrix -+    tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+    ! Get maximum error (max abs value in tmp1) -+    err = maxval(abs(tmp1)) -+#ifdef WITH_MPI -+    call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+    errmax = err -+#endif /* WITH_MPI */ -+    if (myid==0) print *,'Error Orthogonality:',errmax -+ -+    if (nev .ge. 2) then -+      if (errmax .gt. tol_orth .or. errmax .eq. 0.0_rk) then -+        status = 1 -+      endif -+    else -+      if (errmax .gt. 
tol_orth) then -+ status = 1 -+ endif -+ endif -+ end function -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> double *as, double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> float *as, float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex double *as, complex double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex float *as, complex float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* COMPLEXCASE */ -+ -+function check_correctness_evp_numeric_residuals_& -+&MATH_DATATYPE& -+&_& -+&PRECISION& -+&_f (na, nev, na_rows, na_cols, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) result(status) & -+ bind(C,name="check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ -+ use precision_for_tests -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, value :: na, nev, myid, na_rows, na_cols, nblk, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck) :: as(1:na_rows,1:na_cols), z(1:na_rows,1:na_cols) -+ real(kind=rck) :: ev(1:na) -+ TEST_INT_TYPE :: sc_desc(1:9) -+ -+ status = check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ end function -+ -+!---- variant for the generalized eigenproblem -+!---- unlike in Fortran, we cannot use optional parameter -+!---- we thus define a different function -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> double *as, double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> double *bs); -+#else -+ !c> 
TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> float *as, float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, -+ !c> TEST_C_INT_TYPE my_pcol, -+ !c> float *bs); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex double *as, complex double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> complex double *bs); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex float *as, complex float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> complex float *bs); -+#endif -+#endif /* COMPLEXCASE */ -+ -+function check_correctness_evp_gen_numeric_residuals_& -+&MATH_DATATYPE& -+&_& -+&PRECISION& -+&_f (na, nev, na_rows, na_cols, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs) result(status) & -+ bind(C,name="check_correctness_evp_gen_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ -+ use iso_c_binding -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, value :: na, nev, myid, na_rows, na_cols, nblk, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck) :: as(1:na_rows,1:na_cols), z(1:na_rows,1:na_cols), bs(1:na_rows,1:na_cols) -+ real(kind=rck) :: ev(1:na) -+ TEST_INT_TYPE :: sc_desc(1:9) -+ -+ status = check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs) -+ -+ end function -+ -+ !----------------------------------------------------------------------------------------------------------- -+ -+ function check_correctness_eigenvalues_toeplitz_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, diagonalElement, subdiagonalElement, ev, z, myid) result(status) -+ use iso_c_binding -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status, ii, j, myid -+ TEST_INT_TYPE, intent(in) :: na -+ real(kind=rck) :: diagonalElement, subdiagonalElement -+ real(kind=rck) :: ev_analytic(na), ev(na) -+ MATH_DATATYPE(kind=rck) :: z(:,:) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ real(kind=rck), parameter :: pi = 3.141592653589793238462643383279_c_double -+#else -+ real(kind=rck), parameter :: pi = 3.1415926535897932_c_float -+#endif -+ real(kind=rck) :: tmp, maxerr -+ TEST_INT_TYPE :: loctmp -+ status = 0 -+ -+ ! 
analytic solution -+ do ii=1, na -+ ev_analytic(ii) = diagonalElement + 2.0_rk * & -+ subdiagonalElement *cos( pi*real(ii,kind=rk)/ & -+ real(na+1,kind=rk) ) -+ enddo -+ -+ ! sort analytic solution: -+ -+ ! this hack is neither elegant, nor optimized: for huge matrixes it might be expensive -+ ! a proper sorting algorithmus might be implemented here -+ -+ tmp = minval(ev_analytic) -+ loctmp = minloc(ev_analytic, 1) -+ -+ ev_analytic(loctmp) = ev_analytic(1) -+ ev_analytic(1) = tmp -+ do ii=2, na -+ tmp = ev_analytic(ii) -+ do j= ii, na -+ if (ev_analytic(j) .lt. tmp) then -+ tmp = ev_analytic(j) -+ loctmp = j -+ endif -+ enddo -+ ev_analytic(loctmp) = ev_analytic(ii) -+ ev_analytic(ii) = tmp -+ enddo -+ -+ ! compute a simple error max of eigenvalues -+ maxerr = 0.0 -+ maxerr = maxval( (ev(:) - ev_analytic(:))/ev_analytic(:) , 1) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ if (maxerr .gt. 8.e-13_c_double .or. maxerr .eq. 0.0_c_double) then -+#else -+ if (maxerr .gt. 8.e-4_c_float .or. maxerr .eq. 0.0_c_float) then -+#endif -+ status = 1 -+ if (myid .eq. 0) then -+ print *,"Result of Toeplitz matrix test: " -+ print *,"Eigenvalues differ from analytic solution: maxerr = ",maxerr -+ endif -+ endif -+ -+ if (status .eq. 0) then -+ if (myid .eq. 0) then -+ print *,"Result of Toeplitz matrix test: test passed" -+ print *,"Eigenvalues differ from analytic solution: maxerr = ",maxerr -+ endif -+ endif -+ end function -+ -+ function check_correctness_cholesky_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, a, as, na_rows, sc_desc, myid) result(status) -+ use precision_for_tests -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, intent(in) :: na, myid, na_rows -+ -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:), as(:,:) -+ MATH_DATATYPE(kind=rck), dimension(size(as,dim=1),size(as,dim=2)) :: tmp1, tmp2 -+#if COMPLEXCASE == 1 -+ ! needed for [z,c]lange from scalapack -+ real(kind=rk), dimension(2*size(as,dim=1),size(as,dim=2)) :: tmp1_real -+#endif -+ real(kind=rk) :: norm, normmax -+ -+ TEST_INT_TYPE :: sc_desc(:) -+ real(kind=rck) :: err, errmax -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ status = 0 -+ tmp1(:,:) = 0.0_rck -+ -+ -+#if REALCASE == 1 -+ ! tmp1 = a**T -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tran(na, na, 1.0_rck, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ 0.0_rck, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(a) -+#endif /* WITH_MPI */ -+#endif /* REALCASE == 1 */ -+ -+#if COMPLEXCASE == 1 -+ ! tmp1 = a**H -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tranc(na, na, ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(conjg(a)) -+#endif /* WITH_MPI */ -+#endif /* COMPLEXCASE == 1 */ -+ -+ ! tmp2 = a**T * a -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ a, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, na, a, na, ZERO, tmp2, na) -+#endif /* WITH_MPI */ -+ -+ ! 
compare tmp2 with original matrix -+ tmp2(:,:) = tmp2(:,:) - as(:,:) -+ -+#ifdef WITH_MPI -+ norm = p& -+ &BLAS_CHAR& -+ &lange("M",na, na, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#else /* WITH_MPI */ -+ norm = BLAS_CHAR& -+ &lange("M", na, na, tmp2, na_rows, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#endif /* WITH_MPI */ -+ -+ -+#ifdef WITH_MPI -+ call mpi_allreduce(norm, normmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ normmax = norm -+#endif /* WITH_MPI */ -+ -+ if (myid .eq. 0) then -+ print *," Maximum error of result: ", normmax -+ endif -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+! if (normmax .gt. 5e-12_rk8 .or. normmax .eq. 0.0_rk8) then -+ if (normmax .gt. 5e-12_rk8) then -+ status = 1 -+ endif -+#else -+! if (normmax .gt. 5e-4_rk4 .or. normmax .eq. 0.0_rk4) then -+ if (normmax .gt. 5e-4_rk4 ) then -+ status = 1 -+ endif -+#endif -+#endif -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+! if (normmax .gt. 5e-11_rk8 .or. normmax .eq. 0.0_rk8) then -+ if (normmax .gt. 5e-11_rk8 ) then -+ status = 1 -+ endif -+#else -+! if (normmax .gt. 5e-3_rk4 .or. normmax .eq. 0.0_rk4) then -+ if (normmax .gt. 5e-3_rk4) then -+ status = 1 -+ endif -+#endif -+#endif -+ end function -+ -+ function check_correctness_hermitian_multiply_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, a, b, c, na_rows, sc_desc, myid) result(status) -+ use precision_for_tests -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, intent(in) :: na, myid, na_rows -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:), b(:,:), c(:,:) -+ MATH_DATATYPE(kind=rck), dimension(size(a,dim=1),size(a,dim=2)) :: tmp1, tmp2 -+#if COMPLEXCASE == 1 -+ real(kind=rk), dimension(2*size(a,dim=1),size(a,dim=2)) :: tmp1_real -+#endif -+ real(kind=rck) :: norm, normmax -+ -+ -+ TEST_INT_TYPE :: sc_desc(:) -+ real(kind=rck) :: err, errmax -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ status = 0 -+ tmp1(:,:) = ZERO -+ -+#if REALCASE == 1 -+ ! tmp1 = a**T -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tran(na, na, ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(a) -+#endif /* WITH_MPI */ -+ -+#endif /* REALCASE == 1 */ -+ -+#if COMPLEXCASE == 1 -+ ! tmp1 = a**H -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tranc(na, na, ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(conjg(a)) -+#endif /* WITH_MPI */ -+#endif /* COMPLEXCASE == 1 */ -+ -+ ! tmp2 = tmp1 * b -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, b, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, na, b, na, ZERO, tmp2, na) -+#endif -+ -+ ! compare tmp2 with c -+ tmp2(:,:) = tmp2(:,:) - c(:,:) -+ -+#ifdef WITH_MPI -+ ! dirty hack: the last argument should be a real array, but is not referenced -+ ! if mode = "M", thus we get away with a complex argument -+ norm = p& -+ &BLAS_CHAR& -+ &lange("M", na, na, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#else /* WITH_MPI */ -+ ! 
dirty hack: the last argument should be a real array, but is not referenced -+ ! if mode = "M", thus we get away with a complex argument -+ norm = BLAS_CHAR& -+ &lange("M", na, na, tmp2, na_rows, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#endif /* WITH_MPI */ -+ -+#ifdef WITH_MPI -+ call mpi_allreduce(norm, normmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ normmax = norm -+#endif /* WITH_MPI */ -+ -+ if (myid .eq. 0) then -+ print *," Maximum error of result: ", normmax -+ endif -+ -+#ifdef DOUBLE_PRECISION_REAL -+ if (normmax .gt. 5e-11_rk8 ) then -+ status = 1 -+ endif -+#else -+ if (normmax .gt. 5e-3_rk4 ) then -+ status = 1 -+ endif -+#endif -+ -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (normmax .gt. 5e-11_rk8 ) then -+ status = 1 -+ endif -+#else -+ if (normmax .gt. 5e-3_rk4 ) then -+ status = 1 -+ endif -+#endif -+ end function -+ -+ function check_correctness_eigenvalues_frank_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, ev, z, myid) result(status) -+ use iso_c_binding -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status, i, j, myid -+ TEST_INT_TYPE, intent(in) :: na -+ real(kind=rck) :: ev_analytic(na), ev(na) -+ MATH_DATATYPE(kind=rck) :: z(:,:) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ real(kind=rck), parameter :: pi = 3.141592653589793238462643383279_c_double -+#else -+ real(kind=rck), parameter :: pi = 3.1415926535897932_c_float -+#endif -+ real(kind=rck) :: tmp, maxerr -+ TEST_INT_TYPE :: loctmp -+ status = 0 -+ -+ ! analytic solution -+ do i = 1, na -+ j = na - i -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ ev_analytic(i) = pi * (2.0_c_double * real(j,kind=c_double) + 1.0_c_double) / & -+ (2.0_c_double * real(na,kind=c_double) + 1.0_c_double) -+ ev_analytic(i) = 0.5_c_double / (1.0_c_double - cos(ev_analytic(i))) -+#else -+ ev_analytic(i) = pi * (2.0_c_float * real(j,kind=c_float) + 1.0_c_float) / & -+ (2.0_c_float * real(na,kind=c_float) + 1.0_c_float) -+ ev_analytic(i) = 0.5_c_float / (1.0_c_float - cos(ev_analytic(i))) -+#endif -+ enddo -+ -+ ! sort analytic solution: -+ -+ ! this hack is neither elegant, nor optimized: for huge matrixes it might be expensive -+ ! a proper sorting algorithmus might be implemented here -+ -+ tmp = minval(ev_analytic) -+ loctmp = minloc(ev_analytic, 1) -+ -+ ev_analytic(loctmp) = ev_analytic(1) -+ ev_analytic(1) = tmp -+ do i=2, na -+ tmp = ev_analytic(i) -+ do j= i, na -+ if (ev_analytic(j) .lt. tmp) then -+ tmp = ev_analytic(j) -+ loctmp = j -+ endif -+ enddo -+ ev_analytic(loctmp) = ev_analytic(i) -+ ev_analytic(i) = tmp -+ enddo -+ -+ ! compute a simple error max of eigenvalues -+ maxerr = 0.0 -+ maxerr = maxval( (ev(:) - ev_analytic(:))/ev_analytic(:) , 1) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ if (maxerr .gt. 8.e-13_c_double) then -+#else -+ if (maxerr .gt. 8.e-4_c_float) then -+#endif -+ status = 1 -+ if (myid .eq. 0) then -+ print *,"Result of Frank matrix test: " -+ print *,"Eigenvalues differ from analytic solution: maxerr = ",maxerr -+ endif -+ endif -+ end function -+ -+! 
vim: syntax=fortran -diff -ruN elpa-2020.11.001/examples/shared/test_output_type.F90 elpa-2020.11.001_ok/examples/shared/test_output_type.F90 ---- elpa-2020.11.001/examples/shared/test_output_type.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_output_type.F90 2021-02-02 12:54:50.049335000 +0100 -@@ -0,0 +1,11 @@ -+#include "config-f90.h" -+ -+module test_output_type -+ -+ type :: output_t -+ logical :: eigenvectors -+ logical :: eigenvalues -+ end type -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_precision_kinds.F90 elpa-2020.11.001_ok/examples/shared/test_precision_kinds.F90 ---- elpa-2020.11.001/examples/shared/test_precision_kinds.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_precision_kinds.F90 2021-02-02 12:54:50.048528000 +0100 -@@ -0,0 +1,25 @@ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION -+ integer, parameter :: rk = C_DOUBLE -+ integer, parameter :: rck = C_DOUBLE -+#endif -+#ifdef SINGLE_PRECISION -+ integer, parameter :: rk = C_FLOAT -+ integer, parameter :: rck = C_FLOAT -+#endif -+ real(kind=rck), parameter :: ZERO=0.0_rk, ONE = 1.0_rk -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION -+ integer, parameter :: rk = C_DOUBLE -+ integer, parameter :: ck = C_DOUBLE_COMPLEX -+ integer, parameter :: rck = C_DOUBLE_COMPLEX -+#endif -+#ifdef SINGLE_PRECISION -+ integer, parameter :: rk = C_FLOAT -+ integer, parameter :: ck = C_FLOAT_COMPLEX -+ integer, parameter :: rck = C_FLOAT_COMPLEX -+#endif -+ complex(kind=rck), parameter :: ZERO = (0.0_rk,0.0_rk), ONE = (1.0_rk,0.0_rk) -+#endif -diff -ruN elpa-2020.11.001/examples/shared/test_prepare_matrix.F90 elpa-2020.11.001_ok/examples/shared/test_prepare_matrix.F90 ---- elpa-2020.11.001/examples/shared/test_prepare_matrix.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_prepare_matrix.F90 2021-02-02 12:54:50.056514000 +0100 -@@ -0,0 +1,145 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! 
license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. Marek, MPCDF -+#include "config-f90.h" -+ -+module test_prepare_matrix -+ -+ use precision_for_tests -+ interface prepare_matrix_random -+ module procedure prepare_matrix_random_complex_double -+ module procedure prepare_matrix_random_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_random_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_random_complex_single -+#endif -+ end interface -+ -+ -+ interface prepare_matrix_random_spd -+ module procedure prepare_matrix_random_spd_complex_double -+ module procedure prepare_matrix_random_spd_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_random_spd_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_random_spd_complex_single -+#endif -+ end interface -+ -+ -+ interface prepare_matrix_toeplitz -+ module procedure prepare_matrix_toeplitz_complex_double -+ module procedure prepare_matrix_toeplitz_real_double -+ module procedure prepare_matrix_toeplitz_mixed_complex_complex_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_toeplitz_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_toeplitz_complex_single -+ module procedure prepare_matrix_toeplitz_mixed_complex_complex_single -+#endif -+ end interface -+ -+ interface prepare_matrix_frank -+ module procedure prepare_matrix_frank_complex_double -+ module procedure prepare_matrix_frank_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_frank_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_frank_complex_single -+#endif -+ end interface -+ -+ -+ -+ private prows, pcols, map_global_array_index_to_local_index -+ -+ contains -+ -+#include "../../src/general/prow_pcol.F90" -+#include "../../src/general/map_global_to_local.F90" -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef DOUBLE_PRECISION -+#undef REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_prepare_matrix_template.F90 elpa-2020.11.001_ok/examples/shared/test_prepare_matrix_template.F90 ---- elpa-2020.11.001/examples/shared/test_prepare_matrix_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_prepare_matrix_template.F90 
2021-02-02 12:54:50.043491000 +0100 -@@ -0,0 +1,510 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. Marek, MPCDF -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#define TEST_C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#define TEST_C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#endif -+ -+ -+ subroutine prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as, is_skewsymmetric) -+ -+ -+ !use test_util -+ use tests_scalapack_interfaces -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na, sc_desc(:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:), a(:,:), as(:,:) -+ -+#if COMPLEXCASE == 1 -+ real(kind=rk) :: xr(size(a,dim=1), size(a,dim=2)) -+#endif /* COMPLEXCASE */ -+ -+ integer(kind=c_int), allocatable :: iseed(:) -+ integer(kind=c_int) :: n -+ integer(kind=c_int), intent(in), optional :: is_skewsymmetric -+ logical :: skewsymmetric -+ -+ if (present(is_skewsymmetric)) then -+ if (is_skewsymmetric .eq. 1) then -+ skewsymmetric = .true. -+ else -+ skewsymmetric = .false. -+ endif -+ else -+ skewsymmetric = .false. -+ endif -+ -+ ! 
for getting a hermitian test matrix A we get a random matrix Z -+ ! and calculate A = Z + Z**H -+ ! in case of a skewsymmetric matrix A = Z - Z**H -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ call random_seed(size=n) -+ allocate(iseed(n)) -+ iseed(:) = myid -+ call random_seed(put=iseed) -+#if REALCASE == 1 -+ call random_number(z) -+ -+ a(:,:) = z(:,:) -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+ call random_number(xr) -+ -+ z(:,:) = xr(:,:) -+ call RANDOM_NUMBER(xr) -+ z(:,:) = z(:,:) + (0.0_rk,1.0_rk)*xr(:,:) -+ a(:,:) = z(:,:) -+#endif /* COMPLEXCASE */ -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ -+#if REALCASE == 1 -+#ifdef WITH_MPI -+ if (skewsymmetric) then -+ call p& -+ &BLAS_CHAR& -+ &tran(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), -ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**T -+ else -+ call p& -+ &BLAS_CHAR& -+ &tran(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**T -+ endif -+#else /* WITH_MPI */ -+ if (skewsymmetric) then -+ a = a - transpose(z) -+ else -+ a = a + transpose(z) -+ endif -+#endif /* WITH_MPI */ -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef WITH_MPI -+ if (skewsymmetric) then -+ call p& -+ &BLAS_CHAR& -+ &tranc(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), -ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**H -+ else -+ call p& -+ &BLAS_CHAR& -+ &tranc(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**H -+ endif -+#else /* WITH_MPI */ -+ if (skewsymmetric) then -+ a = a - transpose(conjg(z)) -+ else -+ a = a + transpose(conjg(z)) -+ endif -+#endif /* WITH_MPI */ -+#endif /* COMPLEXCASE */ -+ -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been symmetrized' -+ endif -+ -+ ! 
save original matrix A for later accuracy checks -+ -+ as = a -+ -+ deallocate(iseed) -+ -+ end subroutine -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> void prepare_matrix_random_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> double *a, double *z, double *as); -+#else -+ !c> void prepare_matrix_random_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> float *a, float *z, float *as); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> void prepare_matrix_random_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex double *a, complex double *z, complex double *as); -+#else -+ !c> void prepare_matrix_random_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex float *a, complex float *z, complex float *as); -+#endif -+#endif /* COMPLEXCASE */ -+ -+subroutine prepare_matrix_random_& -+&MATH_DATATYPE& -+&_wrapper_& -+&PRECISION& -+& (na, myid, na_rows, na_cols, sc_desc, a, z, as) & -+ bind(C, name="prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE , value :: myid, na, na_rows, na_cols -+ TEST_INT_TYPE :: sc_desc(1:9) -+ MATH_DATATYPE(kind=rck) :: z(1:na_rows,1:na_cols), a(1:na_rows,1:na_cols), & -+ as(1:na_rows,1:na_cols) -+ call prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as) -+ end subroutine -+ -+!---------------------------------------------------------------------------------------------------------------- -+ -+ subroutine prepare_matrix_random_spd_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ !use test_util -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na, sc_desc(:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:), a(:,:), as(:,:) -+ TEST_INT_TYPE, intent(in) :: nblk, np_rows, np_cols, my_prow, my_pcol -+ -+ TEST_INT_TYPE :: ii -+ integer(kind=c_int) :: rowLocal, colLocal -+ -+ -+ call prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as) -+ -+ ! 
hermitian diagonaly dominant matrix => positive definite -+ do ii=1, na -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii,kind=c_int), & -+ rowLocal, colLocal, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = & -+ real(a(int(rowLocal,kind=INT_TYPE), int(colLocal,kind=INT_TYPE))) + na + 1 -+ end if -+ end do -+ -+ as = a -+ -+ end subroutine -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> void prepare_matrix_random_spd_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> double *a, double *z, double *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> void prepare_matrix_random_spd_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> float *a, float *z, float *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> void prepare_matrix_random_spd_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex double *a, complex double *z, complex double *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> void prepare_matrix_random_spd_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex float *a, complex float *z, complex float *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* COMPLEXCASE */ -+ -+subroutine prepare_matrix_random_spd_& -+&MATH_DATATYPE& -+&_wrapper_& -+&PRECISION& -+& (na, myid, na_rows, na_cols, sc_desc, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) & -+ bind(C, name="prepare_matrix_random_spd_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE , value :: myid, na, na_rows, na_cols -+ TEST_INT_TYPE :: sc_desc(1:9) -+ MATH_DATATYPE(kind=rck) :: z(1:na_rows,1:na_cols), a(1:na_rows,1:na_cols), & -+ as(1:na_rows,1:na_cols) -+ TEST_INT_TYPE , value :: nblk, np_rows, np_cols, my_prow, my_pcol -+ call prepare_matrix_random_spd_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ end subroutine -+ -+ -+!---------------------------------------------------------------------------------------------------------------- -+ -+ subroutine prepare_matrix_toeplitz_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, diagonalElement, subdiagonalElement, d, sd, ds, sds, a, as, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ !use test_util -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck) :: diagonalElement, 
subdiagonalElement -+ MATH_DATATYPE(kind=rck) :: d(:), sd(:), ds(:), sds(:) -+ MATH_DATATYPE(kind=rck) :: a(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: ii -+ integer(kind=c_int) :: rowLocal, colLocal -+ -+ d(:) = diagonalElement -+ sd(:) = subdiagonalElement -+ a(:,:) = ZERO -+ -+ ! set up the diagonal and subdiagonals (for general solver test) -+ do ii=1, na ! for diagonal elements -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) ) ) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = diagonalElement -+ endif -+ enddo -+ do ii=1, na-1 -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii+1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) ) ) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ do ii=2, na -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii-1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) ) ) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ ds = d -+ sds = sd -+ as = a -+ end subroutine -+ -+ subroutine prepare_matrix_toeplitz_mixed_complex& -+ &_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+#if COMPLEXCASE == 1 -+ & (na, diagonalElement, subdiagonalElement, d, sd, ds, sds, a, as, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+#endif -+#if REALCASE == 1 -+ & (na, diagonalElement, subdiagonalElement, d, sd, ds, sds, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+#endif -+ !use test_util -+ implicit none -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, np_rows, np_cols, my_prow, my_pcol -+ real(kind=C_DATATYPE_KIND) :: diagonalElement, subdiagonalElement -+ -+ real(kind=C_DATATYPE_KIND) :: d(:), sd(:), ds(:), sds(:) -+ -+#if COMPLEXCASE == 1 -+ complex(kind=C_DATATYPE_KIND) :: a(:,:), as(:,:) -+#endif -+#if REALCASE == 1 -+#endif -+ -+ TEST_INT_TYPE :: ii -+ integer(kind=c_int) :: rowLocal, colLocal -+#if COMPLEXCASE == 1 -+ d(:) = diagonalElement -+ sd(:) = subdiagonalElement -+ -+ ! set up the diagonal and subdiagonals (for general solver test) -+ do ii=1, na ! 
for diagonal elements -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = diagonalElement -+ endif -+ enddo -+ do ii=1, na-1 -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii+1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ do ii=2, na -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii-1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ ds = d -+ sds = sd -+ as = a -+#endif -+ end subroutine -+ -+ subroutine prepare_matrix_frank_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ !use test_util -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, np_rows, np_cols, my_prow, my_pcol -+ -+#if REALCASE == 1 -+ real(kind=C_DATATYPE_KIND) :: a(:,:), z(:,:), as(:,:) -+#endif -+#if COMPLEXCASE == 1 -+ complex(kind=C_DATATYPE_KIND) :: a(:,:), z(:,:), as(:,:) -+#endif -+ -+ TEST_INT_TYPE :: i, j -+ integer(kind=c_int) :: rowLocal, colLocal -+ -+ do i = 1, na -+ do j = 1, na -+ if (map_global_array_index_to_local_index(int(i,kind=c_int), int(j,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ if (j .le. i) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = & -+ real((na+1-i), kind=C_DATATYPE_KIND) / real(na, kind=C_DATATYPE_KIND) -+ else -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = & -+ real((na+1-j), kind=C_DATATYPE_KIND) / real(na, kind=C_DATATYPE_KIND) -+ endif -+ endif -+ enddo -+ enddo -+ -+ z(:,:) = a(:,:) -+ as(:,:) = a(:,:) -+ -+ end subroutine -+ -+ -+! vim: syntax=fortran -diff -ruN elpa-2020.11.001/examples/shared/test_read_input_parameters.F90 elpa-2020.11.001_ok/examples/shared/test_read_input_parameters.F90 ---- elpa-2020.11.001/examples/shared/test_read_input_parameters.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_read_input_parameters.F90 2021-02-02 12:54:50.051994000 +0100 -@@ -0,0 +1,455 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! 
http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+module test_read_input_parameters -+ use elpa, only : ELPA_2STAGE_COMPLEX_DEFAULT, ELPA_2STAGE_REAL_DEFAULT, elpa_int_string_to_value, & -+ elpa_int_value_to_string, ELPA_OK -+ use elpa_utilities, only : error_unit -+ use iso_c_binding -+ use test_util, only : x_ao, x_a -+ use test_output_type -+ -+ implicit none -+ -+ type input_options_t -+ TEST_INT_TYPE :: datatype -+ TEST_INT_TYPE :: na, nev, nblk -+ type(output_t) :: write_to_file -+ TEST_INT_TYPE :: this_real_kernel, this_complex_kernel -+ logical :: realKernelIsSet, complexKernelIsSet -+ TEST_INT_TYPE :: useQrIsSet, useGPUIsSet -+ logical :: doSolveTridi, do1stage, do2stage, justHelpMessage, & -+ doCholesky, doInvertTrm, doTransposeMultiply -+ end type -+ -+ interface read_input_parameters -+ module procedure read_input_parameters_general -+ module procedure read_input_parameters_traditional -+ module procedure read_input_parameters_traditional_noskip -+ end interface -+ -+ contains -+ -+ subroutine parse_arguments(command_line_argument, input_options) -+ implicit none -+ -+ type(input_options_t) :: input_options -+ character(len=128) :: command_line_argument -+ integer(kind=c_int) :: elpa_error -+ -+ if (command_line_argument == "--help") then -+ print *,"usage: elpa_tests [--help] [datatype={real|complex}] [na=number] [nev=number] " -+ print *," [nblk=size of block cyclic distribution] [--output_eigenvalues]" -+ print *," [--output_eigenvectors] [--real-kernel=name_of_kernel]" -+ print *," [--complex-kernel=name_of_kernel] [--use-gpu={0|1}]" -+ print *," [--use-qr={0,1}] [--tests={all|solve-tridi|1stage|2stage|cholesky& -+ &|invert-triangular|transpose-mulitply}]" -+ input_options%justHelpMessage=.true. -+ return -+ endif -+ -+ -+ if (command_line_argument(1:11) == "--datatype=") then -+ if (command_line_argument(12:15) == "real") then -+ input_options%datatype=1 -+ else -+ if (command_line_argument(12:18) == "complex") then -+ input_options%datatype=2 -+ else -+ print *,"datatype unknown! 
use either --datatype=real or --datatype=complex" -+              stop 1 -+           endif -+        endif -+     endif -+ -+     if (command_line_argument(1:3) == "na=") then -+        read(command_line_argument(4:), *) input_options%na -+     endif -+     if (command_line_argument(1:4) == "nev=") then -+        read(command_line_argument(5:), *) input_options%nev -+     endif -+     if (command_line_argument(1:5) == "nblk=") then -+        read(command_line_argument(6:), *) input_options%nblk -+     endif -+ -+     if (command_line_argument(1:21) == "--output_eigenvectors") then -+        input_options%write_to_file%eigenvectors = .true. -+     endif -+ -+     if (command_line_argument(1:20) == "--output_eigenvalues") then -+        input_options%write_to_file%eigenvalues = .true. -+     endif -+ -+     if (command_line_argument(1:14) == "--real-kernel=") then -+        input_options%this_real_kernel = int(elpa_int_string_to_value("real_kernel", & -+                                                                      command_line_argument(15:), elpa_error), & -+                                             kind=INT_TYPE) -+        if (elpa_error /= ELPA_OK) then -+           print *, "Invalid argument for --real-kernel" -+           stop 1 -+        endif -+        print *,"Setting ELPA2 real kernel to ", elpa_int_value_to_string("real_kernel", & -+                                                                          int(input_options%this_real_kernel,kind=c_int)) -+        input_options%realKernelIsSet = .true. -+     endif -+ -+     if (command_line_argument(1:17) == "--complex-kernel=") then -+        input_options%this_complex_kernel = int(elpa_int_string_to_value("complex_kernel", & -+                                                        command_line_argument(18:), elpa_error), kind=INT_TYPE) -+        if (elpa_error /= ELPA_OK) then -+           print *, "Invalid argument for --complex-kernel" -+           stop 1 -+        endif -+        print *,"Setting ELPA2 complex kernel to ", elpa_int_value_to_string("complex_kernel", & -+                                                            int(input_options%this_complex_kernel,kind=c_int)) -+        input_options%complexKernelIsSet = .true. -+     endif -+ -+     if (command_line_argument(1:9) == "--use-qr=") then -+        read(command_line_argument(10:), *) input_options%useQrIsSet -+     endif -+ -+     if (command_line_argument(1:10) == "--use-gpu=") then -+        read(command_line_argument(11:), *) input_options%useGPUIsSet -+     endif -+ -+     if (command_line_argument(1:8) == "--tests=") then -+        if (command_line_argument(9:11) == "all") then -+           input_options%doSolveTridi=.true. -+           input_options%do1stage=.true. -+           input_options%do2stage=.true. -+           input_options%doCholesky=.true. -+           input_options%doInvertTrm=.true. -+           input_options%doTransposeMultiply=.true. -+        else if (command_line_argument(9:19) == "solve-tridi") then -+           input_options%doSolveTridi=.true. -+           input_options%do1stage=.false. -+           input_options%do2stage=.false. -+           input_options%doCholesky=.false. -+           input_options%doInvertTrm=.false. -+           input_options%doTransposeMultiply=.false. -+        else if (command_line_argument(9:14) == "1stage") then -+           input_options%doSolveTridi=.false. -+           input_options%do1stage=.true. -+           input_options%do2stage=.false. -+           input_options%doCholesky=.false. -+           input_options%doInvertTrm=.false. -+           input_options%doTransposeMultiply=.false. -+        else if (command_line_argument(9:14) == "2stage") then -+           input_options%doSolveTridi=.false. -+           input_options%do1stage=.false. -+           input_options%do2stage=.true. -+           input_options%doCholesky=.false. -+           input_options%doInvertTrm=.false. -+           input_options%doTransposeMultiply=.false. -+        else if (command_line_argument(9:16) == "cholesky") then -+           input_options%doSolveTridi=.false. -+           input_options%do1stage=.false. -+           input_options%do2stage=.false. -+           input_options%doCholesky=.true. -+           input_options%doInvertTrm=.false. -+           input_options%doTransposeMultiply=.false. -+        else if (command_line_argument(9:25) == "invert-triangular") then -+           input_options%doSolveTridi=.false. 
-+ input_options%do1stage=.false. -+ input_options%do2stage=.false. -+ input_options%doCholesky=.false. -+ input_options%doInvertTrm=.true. -+ input_options%doTransposeMultiply=.false. -+ else if (command_line_argument(9:26) == "transpose-multiply") then -+ input_options%doSolveTridi=.false. -+ input_options%do1stage=.false. -+ input_options%do2stage=.false. -+ input_options%doCholesky=.false. -+ input_options%doInvertTrm=.false. -+ input_options%doTransposeMultiply=.true. -+ else -+ print *,"unknown test specified" -+ stop 1 -+ endif -+ endif -+ -+ end subroutine -+ -+ subroutine read_input_parameters_general(input_options) -+ use precision_for_tests -+ implicit none -+ -+ type(input_options_t) :: input_options -+ -+ ! Command line arguments -+ character(len=128) :: arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 -+ -+ ! default parameters -+ input_options%datatype = 1 -+ input_options%na = 500 -+ input_options%nev = 150 -+ input_options%nblk = 16 -+ -+ input_options%write_to_file%eigenvectors = .false. -+ input_options%write_to_file%eigenvalues = .false. -+ -+ input_options%this_real_kernel = ELPA_2STAGE_REAL_DEFAULT -+ input_options%this_complex_kernel = ELPA_2STAGE_COMPLEX_DEFAULT -+ input_options%realKernelIsSet = .false. -+ input_options%complexKernelIsSet = .false. -+ -+ input_options%useQrIsSet = 0 -+ -+ input_options%useGPUIsSet = 0 -+ -+ input_options%do1Stage = .true. -+ input_options%do2Stage = .true. -+ input_options%doSolveTridi = .true. -+ input_options%doCholesky=.true. -+ input_options%doInvertTrm=.true. -+ input_options%doTransposeMultiply=.true. -+ input_options%justHelpMessage=.false. -+ -+ ! test na=1500 nev=50 nblk=16 --help --kernel --output_eigenvectors --output_eigenvalues -+ if (COMMAND_ARGUMENT_COUNT() .gt. 8) then -+ write(error_unit, '(a,i0,a)') "Invalid number (", COMMAND_ARGUMENT_COUNT(), ") of command line arguments!" -+ stop 1 -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() .gt. 0) then -+ -+ call get_COMMAND_ARGUMENT(1, arg1) -+ -+ call parse_arguments(arg1, input_options) -+ -+ -+ -+ if (COMMAND_ARGUMENT_COUNT() .ge. 2) then -+ ! argument 2 -+ call get_COMMAND_ARGUMENT(2, arg2) -+ -+ call parse_arguments(arg2, input_options) -+ endif -+ -+ ! argument 3 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 3) then -+ -+ call get_COMMAND_ARGUMENT(3, arg3) -+ -+ call parse_arguments(arg3, input_options) -+ endif -+ -+ ! argument 4 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 4) then -+ -+ call get_COMMAND_ARGUMENT(4, arg4) -+ -+ call parse_arguments(arg4, input_options) -+ -+ endif -+ -+ ! argument 5 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 5) then -+ -+ call get_COMMAND_ARGUMENT(5, arg5) -+ -+ call parse_arguments(arg5, input_options) -+ endif -+ -+ ! argument 6 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 6) then -+ -+ call get_COMMAND_ARGUMENT(6, arg6) -+ -+ call parse_arguments(arg6, input_options) -+ endif -+ -+ ! argument 7 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 7) then -+ -+ call get_COMMAND_ARGUMENT(7, arg7) -+ -+ call parse_arguments(arg7, input_options) -+ -+ endif -+ -+ ! argument 8 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 8) then -+ -+ call get_COMMAND_ARGUMENT(8, arg8) -+ -+ call parse_arguments(arg8, input_options) -+ -+ endif -+ -+ ! argument 9 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 9) then -+ -+ call get_COMMAND_ARGUMENT(9, arg9) -+ -+ call parse_arguments(arg8, input_options) -+ -+ endif -+ -+ ! argument 10 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 
10) then -+ -+ call get_COMMAND_ARGUMENT(10, arg10) -+ -+ call parse_arguments(arg8, input_options) -+ -+ endif -+ -+ endif -+ -+ if (input_options%useQrIsSet .eq. 1 .and. input_options%datatype .eq. 2) then -+ print *,"You cannot use QR-decomposition in complex case" -+ stop 1 -+ endif -+ -+ end subroutine -+ -+ subroutine read_input_parameters_traditional_noskip(na, nev, nblk, write_to_file) -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_TYPE, intent(out) :: na, nev, nblk -+ -+ type(output_t), intent(out) :: write_to_file -+ logical :: skip_check_correctness -+ -+ call read_input_parameters_traditional(na, nev, nblk, write_to_file, skip_check_correctness) -+ end subroutine -+ -+ subroutine read_input_parameters_traditional(na, nev, nblk, write_to_file, skip_check_correctness) -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_TYPE, intent(out) :: na, nev, nblk -+ -+ type(output_t), intent(out) :: write_to_file -+ logical, intent(out) :: skip_check_correctness -+ -+ ! Command line arguments -+ character(len=128) :: arg1, arg2, arg3, arg4, arg5 -+ -+ ! default parameters -+ na = 5000 -+ nev = 150 -+ nblk = 16 -+ write_to_file%eigenvectors = .false. -+ write_to_file%eigenvalues = .false. -+ skip_check_correctness = .false. -+ -+ if (.not. any(COMMAND_ARGUMENT_COUNT() == [0, 3, 4, 5])) then -+ write(error_unit, '(a,i0,a)') "Invalid number (", COMMAND_ARGUMENT_COUNT(), ") of command line arguments!" -+ write(error_unit, *) "Expected: program [ [matrix_size num_eigenvalues block_size] & -+ ""output_eigenvalues"" ""output_eigenvectors""]" -+ stop 1 -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() == 3) then -+ call GET_COMMAND_ARGUMENT(1, arg1) -+ call GET_COMMAND_ARGUMENT(2, arg2) -+ call GET_COMMAND_ARGUMENT(3, arg3) -+ -+ read(arg1, *) na -+ read(arg2, *) nev -+ read(arg3, *) nblk -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() == 4) then -+ call GET_COMMAND_ARGUMENT(1, arg1) -+ call GET_COMMAND_ARGUMENT(2, arg2) -+ call GET_COMMAND_ARGUMENT(3, arg3) -+ call GET_COMMAND_ARGUMENT(4, arg4) -+ read(arg1, *) na -+ read(arg2, *) nev -+ read(arg3, *) nblk -+ -+ if (arg4 .eq. "output_eigenvalues") then -+ write_to_file%eigenvalues = .true. -+ elseif (arg4 .eq. "skip_check_correctness") then -+ skip_check_correctness = .true. -+ else -+ write(error_unit, *) & -+ "Invalid value for parameter 4. Must be ""output_eigenvalues"", ""skip_check_correctness"" or omitted" -+ stop 1 -+ endif -+ -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() == 5) then -+ call GET_COMMAND_ARGUMENT(1, arg1) -+ call GET_COMMAND_ARGUMENT(2, arg2) -+ call GET_COMMAND_ARGUMENT(3, arg3) -+ call GET_COMMAND_ARGUMENT(4, arg4) -+ call GET_COMMAND_ARGUMENT(5, arg5) -+ read(arg1, *) na -+ read(arg2, *) nev -+ read(arg3, *) nblk -+ -+ if (arg4 .eq. "output_eigenvalues") then -+ write_to_file%eigenvalues = .true. -+ else -+ write(error_unit, *) "Invalid value for output flag! Must be ""output_eigenvalues"" or omitted" -+ stop 1 -+ endif -+ -+ if (arg5 .eq. "output_eigenvectors") then -+ write_to_file%eigenvectors = .true. -+ else -+ write(error_unit, *) "Invalid value for output flag! Must be ""output_eigenvectors"" or omitted" -+ stop 1 -+ endif -+ -+ endif -+ end subroutine -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_redir.c elpa-2020.11.001_ok/examples/shared/test_redir.c ---- elpa-2020.11.001/examples/shared/test_redir.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_redir.c 2021-02-02 12:54:50.055869000 +0100 -@@ -0,0 +1,125 @@ -+// This file is part of ELPA. 
-+// -+// The ELPA library was originally created by the ELPA consortium, -+// consisting of the following organizations: -+// -+// - Max Planck Computing and Data Facility (MPCDF), formerly known as -+// Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+// - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+// Informatik, -+// - Technische Universität München, Lehrstuhl für Informatik mit -+// Schwerpunkt Wissenschaftliches Rechnen , -+// - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+// - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+// Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+// and -+// - IBM Deutschland GmbH -+// -+// -+// More information can be found here: -+// http://elpa.mpcdf.mpg.de/ -+// -+// ELPA is free software: you can redistribute it and/or modify -+// it under the terms of the version 3 of the license of the -+// GNU Lesser General Public License as published by the Free -+// Software Foundation. -+// -+// ELPA is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU Lesser General Public License for more details. -+// -+// You should have received a copy of the GNU Lesser General Public License -+// along with ELPA. If not, see <http://www.gnu.org/licenses/> -+// -+// ELPA reflects a substantial effort on the part of the original -+// ELPA consortium, and we ask you to respect the spirit of the -+// license that we chose: i.e., please contribute any changes you -+// may have back to the original ELPA library distribution, and keep -+// any derivatives of ELPA under the same license that we chose for -+// the original distribution, the GNU Lesser General Public License. 
-+// -+// -+// -------------------------------------------------------------------------------------------------- -+#include <stdio.h> -+#include <fcntl.h> -+#include <stdlib.h> -+#include <unistd.h> -+#include <sys/stat.h> -+#include <sys/types.h> -+#include <unistd.h> -+#include <errno.h> -+ -+#define NAME_LENGTH 4096 -+#define FILENAME "./mpi_stdout/std%3s_rank%04d.txt" -+ -+FILE *tout, *terr; -+void dup_filename(char *filename, int dupfd); -+void dup_fd(int fd, int dupfd); -+ -+int _mkdirifnotexists(const char *dir) { -+ struct stat s; -+ if (stat(dir, &s) != 0) { -+ if (errno == ENOENT) { -+ if (mkdir(dir, 0755) != 0) { -+ perror("mkdir"); -+ return 0; -+ } else { -+ return 1; -+ } -+ } else { -+ perror("stat()"); -+ return 0; -+ } -+ } else if (!S_ISDIR(s.st_mode)) { -+ fprintf(stderr, "\"%s\" does exist and is not a directory\n", dir); -+ return 0; -+ } else { -+ return 1; -+ } -+} -+ -+int create_directories(void) { -+ if (!_mkdirifnotexists("mpi_stdout")) return 0; -+ return 1; -+} -+ -+void redirect_stdout(int *myproc) { -+ char buf[NAME_LENGTH]; -+ -+ if (*myproc == 0) { -+ snprintf(buf, NAME_LENGTH, "tee " FILENAME, "out", *myproc); -+ tout = popen(buf, "w"); -+ dup_fd(fileno(tout), 1); -+ -+ snprintf(buf, NAME_LENGTH, "tee " FILENAME, "err", *myproc); -+ terr = popen(buf, "w"); -+ dup_fd(fileno(terr), 2); -+ } else { -+ snprintf(buf, NAME_LENGTH, FILENAME, "out", *myproc); -+ dup_filename(buf, 1); -+ -+ snprintf(buf, NAME_LENGTH, FILENAME, "err", *myproc); -+ dup_filename(buf, 2); -+ } -+ -+ return; -+} -+ -+/* Redirect file descriptor dupfd to file filename */ -+void dup_filename(char *filename, int dupfd) { -+ int fd = open(filename, O_WRONLY | O_CREAT | O_TRUNC, 0644); -+ if(fd < 0) { -+ perror("open()"); -+ exit(1); -+ } -+ dup_fd(fd, dupfd); -+} -+ -+/* Redirect file descriptor dupfd to file descriptor fd */ -+void dup_fd(int fd, int dupfd) { -+ if(dup2(fd,dupfd) < 0) { -+ perror("dup2()"); -+ exit(1); -+ } -+} -diff -ruN elpa-2020.11.001/examples/shared/test_redirect.F90 elpa-2020.11.001_ok/examples/shared/test_redirect.F90 ---- elpa-2020.11.001/examples/shared/test_redirect.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_redirect.F90 2021-02-02 12:54:50.047904000 +0100 -@@ -0,0 +1,116 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! 
You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+module test_redirect -+ use, intrinsic :: iso_c_binding -+ -+ implicit none -+ public -+ -+ logical :: use_redirect_stdout -+ -+ interface -+ integer(kind=C_INT) function create_directories_c() bind(C, name="create_directories") -+ use, intrinsic :: iso_c_binding -+ implicit none -+ end function -+ end interface -+ -+ interface -+ subroutine redirect_stdout_c(myproc) bind(C, name="redirect_stdout") -+ use, intrinsic :: iso_c_binding -+ implicit none -+ integer(kind=C_INT), intent(in) :: myproc -+ end subroutine -+ end interface -+ -+ contains -+!> -+!> This function is the Fortran driver for the -+!> C program to create the redirect output -+!> directory -+!> -+!> \param none -+!> \result res integer indicates success or failure -+ function create_directories() result(res) -+ implicit none -+ integer(kind=C_INT) :: res -+ res = int(create_directories_c()) -+ end function -+!> -+!> This subroutine is the Fortran driver for the -+!> redirection of stdout and stderr of each MPI -+!> task -+!> -+!> \param myproc MPI task id -+ subroutine redirect_stdout(myproc) -+ use, intrinsic :: iso_c_binding -+ implicit none -+ integer(kind=C_INT), intent(in) :: myproc -+ call redirect_stdout_c(int(myproc, kind=C_INT)) -+ end subroutine -+!> -+!> This function checks, whether the environment variable -+!> "REDIRECT_ELPA_TEST_OUTPUT" is set to "true". -+!> Returns ".true." if variable is set, otherwise ".false." -+!> This function only works if the during the build process -+!> "HAVE_ENVIRONMENT_CHECKING" was tested successfully -+!> -+!> \param none -+!> \return logical -+ function check_redirect_environment_variable() result(redirect) -+ implicit none -+ logical :: redirect -+ character(len=255) :: REDIRECT_VARIABLE -+ -+ redirect = .false. -+ -+#if defined(HAVE_ENVIRONMENT_CHECKING) -+ call get_environment_variable("REDIRECT_ELPA_TEST_OUTPUT",REDIRECT_VARIABLE) -+#endif -+ if (trim(REDIRECT_VARIABLE) .eq. "true") redirect = .true. -+ -+ end function -+ -+end module test_redirect -diff -ruN elpa-2020.11.001/examples/shared/test_scalapack.F90 elpa-2020.11.001_ok/examples/shared/test_scalapack.F90 ---- elpa-2020.11.001/examples/shared/test_scalapack.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_scalapack.F90 2021-02-02 12:54:50.057113000 +0100 -@@ -0,0 +1,111 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! 
- Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+ -+#include "../Fortran/assert.h" -+#include "config-f90.h" -+ -+module test_scalapack -+ use test_util -+ -+ interface solve_scalapack_all -+ module procedure solve_pdsyevd -+ module procedure solve_pzheevd -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure solve_pssyevd -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure solve_pcheevd -+#endif -+ end interface -+ -+ interface solve_scalapack_part -+ module procedure solve_pdsyevr -+ module procedure solve_pzheevr -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure solve_pssyevr -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure solve_pcheevr -+#endif -+ end interface -+ -+contains -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+ -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef DOUBLE_PRECISION -+#undef REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_scalapack_template.F90 elpa-2020.11.001_ok/examples/shared/test_scalapack_template.F90 ---- elpa-2020.11.001/examples/shared/test_scalapack_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_scalapack_template.F90 2021-02-02 12:54:50.052650000 +0100 -@@ -0,0 +1,161 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! 
Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+ -+ ! compute all eigenvectors -+ subroutine solve_p& -+ &BLAS_CHAR_AND_SY_OR_HE& -+ &evd(na, a, sc_desc, ev, z) -+ implicit none -+#include "./test_precision_kinds.F90" -+ integer(kind=ik), intent(in) :: na -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:) -+ real(kind=rk), intent(inout) :: ev(:) -+ integer(kind=ik), intent(in) :: sc_desc(:) -+ integer(kind=ik) :: info, lwork, liwork, lrwork -+ MATH_DATATYPE(kind=rck), allocatable :: work(:) -+ real(kind=rk), allocatable :: rwork(:) -+ integer, allocatable :: iwork(:) -+ -+ allocate(work(1), iwork(1), rwork(1)) -+ -+ ! query for required workspace -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, -1, iwork, -1, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, -1, rwork, -1, iwork, -1, info) -+#endif -+ ! write(*,*) "computed sizes", lwork, liwork, "required sizes ", work(1), iwork(1) -+ lwork = work(1) -+ liwork = iwork(1) -+ deallocate(work, iwork) -+ allocate(work(lwork), stat = info) -+ allocate(iwork(liwork), stat = info) -+#ifdef COMPLEXCASE -+ lrwork = rwork(1) -+ deallocate(rwork) -+ allocate(rwork(lrwork), stat = info) -+#endif -+ ! the actuall call to the method -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, lwork, iwork, liwork, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, lwork, rwork, lrwork, iwork, liwork, info) -+#endif -+ -+ deallocate(iwork, work, rwork) -+ end subroutine -+ -+ -+ ! 
compute part of eigenvectors -+ subroutine solve_p& -+ &BLAS_CHAR_AND_SY_OR_HE& -+ &evr(na, a, sc_desc, nev, ev, z) -+ implicit none -+#include "./test_precision_kinds.F90" -+ integer(kind=ik), intent(in) :: na, nev -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:) -+ real(kind=rk), intent(inout) :: ev(:) -+ integer(kind=ik), intent(in) :: sc_desc(:) -+ integer(kind=ik) :: info, lwork, liwork, lrwork -+ MATH_DATATYPE(kind=rck), allocatable :: work(:) -+ real(kind=rk), allocatable :: rwork(:) -+ integer, allocatable :: iwork(:) -+ integer(kind=ik) :: comp_eigenval, comp_eigenvec, smallest_ev_idx, largest_ev_idx -+ -+ allocate(work(1), iwork(1), rwork(1)) -+ smallest_ev_idx = 1 -+ largest_ev_idx = nev -+ ! query for required workspace -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, -1, iwork, -1, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, -1, rwork, -1, iwork, -1, info) -+#endif -+ ! write(*,*) "computed sizes", lwork, liwork, "required sizes ", work(1), iwork(1) -+ lwork = work(1) -+ liwork = iwork(1) -+ deallocate(work, iwork) -+ allocate(work(lwork), stat = info) -+ allocate(iwork(liwork), stat = info) -+#ifdef COMPLEXCASE -+ lrwork = rwork(1) -+ deallocate(rwork) -+ allocate(rwork(lrwork), stat = info) -+#endif -+ ! the actuall call to the method -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, lwork, iwork, liwork, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, lwork, rwork, lrwork, iwork, liwork, info) -+#endif -+ assert(comp_eigenval == nev) -+ assert(comp_eigenvec == nev) -+ deallocate(iwork, work, rwork) -+ end subroutine -+ -diff -ruN elpa-2020.11.001/examples/shared/test_setup_mpi.F90 elpa-2020.11.001_ok/examples/shared/test_setup_mpi.F90 ---- elpa-2020.11.001/examples/shared/test_setup_mpi.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_setup_mpi.F90 2021-02-02 12:54:50.047215000 +0100 -@@ -0,0 +1,115 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! 
GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+module test_setup_mpi -+ -+ contains -+ -+ subroutine setup_mpi(myid, nprocs) -+ use test_util -+ use ELPA_utilities -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ TEST_INT_TYPE, intent(out) :: myid, nprocs -+ TEST_INT_MPI_TYPE :: myidMPI, nprocsMPI -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_MPI_TYPE :: required_mpi_thread_level, & -+ provided_mpi_thread_level -+#endif -+ -+ -+#ifdef WITH_MPI -+ -+#ifndef WITH_OPENMP_TRADITIONAL -+ call mpi_init(mpierr) -+#else -+ required_mpi_thread_level = MPI_THREAD_MULTIPLE -+ -+ call mpi_init_thread(required_mpi_thread_level, & -+ provided_mpi_thread_level, mpierr) -+ -+ if (required_mpi_thread_level .ne. provided_mpi_thread_level) then -+ write(error_unit,*) "MPI ERROR: MPI_THREAD_MULTIPLE is not provided on this system" -+ write(error_unit,*) " only ", mpi_thread_level_name(provided_mpi_thread_level), " is available" -+ call MPI_FINALIZE(mpierr) -+ call exit(77) -+ endif -+#endif -+ call mpi_comm_rank(mpi_comm_world, myidMPI, mpierr) -+ call mpi_comm_size(mpi_comm_world, nprocsMPI,mpierr) -+ -+ myid = int(myidMPI,kind=BLAS_KIND) -+ nprocs = int(nprocsMPI,kind=BLAS_KIND) -+ -+ if (nprocs <= 1) then -+ print *, "The test programs must be run with more than 1 task to ensure that usage with MPI is actually tested" -+ stop 1 -+ endif -+#else -+ myid = 0 -+ nprocs = 1 -+#endif -+ -+ end subroutine -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/tests_variable_definitions.F90 elpa-2020.11.001_ok/examples/shared/tests_variable_definitions.F90 ---- elpa-2020.11.001/examples/shared/tests_variable_definitions.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/tests_variable_definitions.F90 2021-02-02 12:54:50.045008000 +0100 -@@ -0,0 +1,65 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! 
Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! https://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! This file was written by A. Marek, MPC -+ -+#include "config-f90.h" -+module precision_for_tests -+ use iso_c_binding, only : C_FLOAT, C_DOUBLE, C_FLOAT_COMPLEX, C_DOUBLE_COMPLEX, C_INT32_T, C_INT64_T, C_INT -+ -+ implicit none -+ integer, parameter :: rk8 = C_DOUBLE -+ integer, parameter :: rk4 = C_FLOAT -+ integer, parameter :: ck8 = C_DOUBLE_COMPLEX -+ integer, parameter :: ck4 = C_FLOAT_COMPLEX -+ integer, parameter :: ik = C_INT32_T -+ integer, parameter :: lik = C_INT64_T -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+ integer, parameter :: BLAS_KIND = C_INT64_T -+#else -+ integer, parameter :: BLAS_KIND = C_INT32_T -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+ integer, parameter :: MPI_KIND = C_INT64_T -+#else -+ integer, parameter :: MPI_KIND = C_INT32_T -+#endif -+end module precision_for_tests -diff -ruN elpa-2020.11.001/examples/shared/test_util.F90 elpa-2020.11.001_ok/examples/shared/test_util.F90 ---- elpa-2020.11.001/examples/shared/test_util.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_util.F90 2021-02-02 12:54:50.055213000 +0100 -@@ -0,0 +1,156 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! 
ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+#undef TEST_INT_TYPE -+#undef INT_TYPE -+#undef TEST_INT_MPI_TYPE -+#undef INT_MPI_TYPE -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+module test_util -+ use iso_c_binding -+ use precision_for_tests -+#ifdef WITH_MPI -+#ifdef HAVE_MPI_MODULE -+ use mpi -+ implicit none -+#else -+ implicit none -+ include 'mpif.h' -+#endif -+#else -+ TEST_INT_MPI_TYPE, parameter :: mpi_comm_world = -1 -+#endif -+ -+ contains -+!> -+!> This function translates, if ELPA was build with OpenMP support, -+!> the found evel of "thread safetiness" from the internal number -+!> of the MPI library into a human understandable value -+!> -+!> \param level thread-saftiness of the MPI library -+!> \return str human understandable value of thread saftiness -+ pure function mpi_thread_level_name(level) result(str) -+ use, intrinsic :: iso_c_binding -+ implicit none -+ integer(kind=c_int), intent(in) :: level -+ character(len=21) :: str -+#ifdef WITH_MPI -+ select case(level) -+ case (MPI_THREAD_SINGLE) -+ str = "MPI_THREAD_SINGLE" -+ case (MPI_THREAD_FUNNELED) -+ str = "MPI_THREAD_FUNNELED" -+ case (MPI_THREAD_SERIALIZED) -+ str = "MPI_THREAD_SERIALIZED" -+ case (MPI_THREAD_MULTIPLE) -+ str = "MPI_THREAD_MULTIPLE" -+ case default -+ write(str,'(i0,1x,a)') level, "(Unknown level)" -+ end select -+#endif -+ end function -+ -+ function seconds() result(s) -+ integer :: ticks, tick_rate -+ real(kind=c_double) :: s -+ -+ call system_clock(count=ticks, count_rate=tick_rate) -+ s = real(ticks, kind=c_double) / tick_rate -+ end function -+ -+ subroutine x_a(condition, condition_string, file, line) -+#ifdef HAVE_ISO_FORTRAN_ENV -+ use iso_fortran_env, only : error_unit -+#endif -+ implicit none -+#ifndef HAVE_ISO_FORTRAN_ENV -+ integer, parameter :: error_unit = 0 -+#endif -+ logical, intent(in) :: condition -+ character(len=*), intent(in) :: condition_string -+ character(len=*), intent(in) :: file -+ integer, intent(in) :: line -+ -+ if (.not. 
condition) then -+ write(error_unit,'(a,i0)') "Assertion `" // condition_string // "` failed at " // file // ":", line -+ stop 1 -+ end if -+ end subroutine -+ -+ subroutine x_ao(error_code, error_code_string, file, line) -+ use elpa -+#ifdef HAVE_ISO_FORTRAN_ENV -+ use iso_fortran_env, only : error_unit -+#endif -+ implicit none -+#ifndef HAVE_ISO_FORTRAN_ENV -+ integer, parameter :: error_unit = 0 -+#endif -+ integer, intent(in) :: error_code -+ character(len=*), intent(in) :: error_code_string -+ character(len=*), intent(in) :: file -+ integer, intent(in) :: line -+ -+ if (error_code /= ELPA_OK) then -+ write(error_unit,'(a,i0)') "Assertion failed: `" // error_code_string // & -+ " is " // elpa_strerr(error_code) // "` at " // file // ":", line -+ stop 1 -+ end if -+ end subroutine -+end module -+ -diff -ruN elpa-2020.11.001/examples/test_real_e1.F90 elpa-2020.11.001_ok/examples/test_real_e1.F90 ---- elpa-2020.11.001/examples/test_real_e1.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/test_real_e1.F90 2021-02-18 14:16:15.389420020 +0100 -@@ -0,0 +1,255 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 1 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (4000, 1500, 16) -+!> are choosen. 
-+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+program test_real_example -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ -+ use iso_c_binding -+ -+ use elpa -+#ifdef WITH_OPENMP -+ use omp_lib -+#endif -+ -+ -+#ifdef HAVE_MPI_MODULE -+ use mpi -+ implicit none -+#else -+ implicit none -+ include 'mpif.h' -+#endif -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ integer :: nblk -+ integer :: na, nev -+ -+ integer :: np_rows, np_cols, na_rows, na_cols -+ -+ integer :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ integer :: i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ integer, external :: numroc -+ -+ real(kind=c_double), allocatable :: a(:,:), z(:,:), ev(:) -+ -+ integer :: iseed(4096) ! Random seed, size should be sufficient for every generator -+ -+ integer :: STATUS -+ integer :: success -+ character(len=8) :: task_suffix -+ integer :: j -+ -+ integer, parameter :: error_units = 0 -+ -+#ifdef WITH_OPENMP -+ integer n_threads -+#endif -+ class(elpa_t), pointer :: e -+ !------------------------------------------------------------------------------- -+ -+ -+ ! default parameters -+ na = 1000 -+ nev = 500 -+ nblk = 16 -+ -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world,myid,mpierr) -+ call mpi_comm_size(mpi_comm_world,nprocs,mpierr) -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ ! initialise BLACS -+ my_blacs_ctxt = mpi_comm_world -+ call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+#ifdef WITH_OPENMP -+ n_threads=omp_get_max_threads() -+#endif -+ -+ -+ na_rows = numroc(na, nblk, my_prow, 0, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0, np_cols) -+ -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0, 0, my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_units,*) 'Error in BLACS descinit! 
info=',info -+ write(error_units,*) 'Most likely this happend since you want to use' -+ write(error_units,*) 'more MPI tasks than are possible for your' -+ write(error_units,*) 'problem size (matrix size and blocksize)!' -+ write(error_units,*) 'The blacsgrid can not be set up properly' -+ write(error_units,*) 'Try reducing the number of MPI tasks...' -+ call MPI_ABORT(mpi_comm_world, 1, mpierr) -+ endif -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ iseed(:) = myid -+ call RANDOM_SEED(put=iseed) -+ call RANDOM_NUMBER(z) -+ -+ a(:,:) = z(:,:) -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ call pdtran(na, na, 1.d0, z, 1, 1, sc_desc, 1.d0, a, 1, 1, sc_desc) ! A = A + Z**T -+ -+ !------------------------------------------------------------------------------- -+ -+ if (elpa_init(20171201) /= elpa_ok) then -+ print *, "ELPA API version not supported" -+ stop -+ endif -+ e => elpa_allocate() -+ -+ ! set parameters decribing the matrix and it's MPI distribution -+ call e%set("na", na, success) -+ call e%set("nev", nev, success) -+ call e%set("local_nrows", na_rows, success) -+ call e%set("local_ncols", na_cols, success) -+ call e%set("nblk", nblk, success) -+ call e%set("mpi_comm_parent", mpi_comm_world, success) -+ call e%set("process_row", my_prow, success) -+ call e%set("process_col", my_pcol, success) -+ -+#ifdef CUDA -+ call e%set("gpu", 1, success) -+#endif -+#ifdef WITH_OPENMP -+ call e%set("omp_threads", n_threads, success) -+#endif -+ success = e%setup() -+ -+ call e%set("solver", elpa_solver_1stage, success) -+ -+ -+ ! Calculate eigenvalues/eigenvectors -+ -+ if (myid==0) then -+ print '(a)','| Entering one-step ELPA solver ... ' -+ print * -+ end if -+ -+ call mpi_barrier(mpi_comm_world, mpierr) ! for correct timings only -+ call e%eigenvectors(a, ev, z, success) -+ -+ if (myid==0) then -+ print '(a)','| One-step ELPA solver complete.' -+ print * -+ end if -+ -+ call elpa_deallocate(e) -+ call elpa_uninit() -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+end -+ -diff -ruN elpa-2020.11.001/examples/test_real_e2.F90 elpa-2020.11.001_ok/examples/test_real_e2.F90 ---- elpa-2020.11.001/examples/test_real_e2.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/test_real_e2.F90 2021-02-18 14:07:41.304404521 +0100 -@@ -0,0 +1,262 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! 
GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (4000, 1500, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+program test_real_example -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ -+ use iso_c_binding -+ -+ use elpa -+ -+#ifdef HAVE_MPI_MODULE -+ use mpi -+ implicit none -+#else -+ implicit none -+ include 'mpif.h' -+#endif -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ integer :: nblk -+ integer :: na, nev -+ -+ integer :: np_rows, np_cols, na_rows, na_cols -+ -+ integer :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ integer :: i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ integer, external :: numroc -+ -+ real(kind=c_double), allocatable :: a(:,:), z(:,:), ev(:) -+ -+ integer :: iseed(4096) ! Random seed, size should be sufficient for every generator -+ -+ integer :: STATUS -+ integer :: success -+ character(len=8) :: task_suffix -+ integer :: j -+ -+ integer, parameter :: error_units = 0 -+ -+ class(elpa_t), pointer :: e -+ !------------------------------------------------------------------------------- -+ -+ -+ ! 
default parameters -+ na = 1000 -+ nev = 500 -+ nblk = 16 -+ -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world,myid,mpierr) -+ call mpi_comm_size(mpi_comm_world,nprocs,mpierr) -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ ! initialise BLACS -+ my_blacs_ctxt = mpi_comm_world -+ call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+ na_rows = numroc(na, nblk, my_prow, 0, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0, np_cols) -+ -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0, 0, my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_units,*) 'Error in BLACS descinit! info=',info -+ write(error_units,*) 'Most likely this happend since you want to use' -+ write(error_units,*) 'more MPI tasks than are possible for your' -+ write(error_units,*) 'problem size (matrix size and blocksize)!' -+ write(error_units,*) 'The blacsgrid can not be set up properly' -+ write(error_units,*) 'Try reducing the number of MPI tasks...' -+ call MPI_ABORT(mpi_comm_world, 1, mpierr) -+ endif -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ iseed(:) = myid -+ call RANDOM_SEED(put=iseed) -+ call RANDOM_NUMBER(z) -+ -+ a(:,:) = z(:,:) -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ call pdtran(na, na, 1.d0, z, 1, 1, sc_desc, 1.d0, a, 1, 1, sc_desc) ! A = A + Z**T -+ -+ !------------------------------------------------------------------------------- -+ -+ if (elpa_init(20171201) /= elpa_ok) then -+ print *, "ELPA API version not supported" -+ stop -+ endif -+ e => elpa_allocate() -+ -+ ! 
set parameters decribing the matrix and it's MPI distribution -+ call e%set("na", na, success) -+ call e%set("nev", nev, success) -+ call e%set("local_nrows", na_rows, success) -+ call e%set("local_ncols", na_cols, success) -+ call e%set("nblk", nblk, success) -+ call e%set("mpi_comm_parent", mpi_comm_world, success) -+ call e%set("process_row", my_prow, success) -+ call e%set("process_col", my_pcol, success) -+#ifdef CUDA -+ call e%set("gpu", 1, success) -+#endif -+ -+ success = e%setup() -+ -+ call e%set("solver", elpa_solver_2stage, success) -+ -+#ifdef CUDAKERNEL -+ call e%set("real_kernel", ELPA_2STAGE_REAL_GPU, success) -+#endif -+#ifdef AVX512 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX512_BLOCK2,success ) -+#endif -+#ifdef AVX2_B6 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX2_BLOCK6,success ) -+#endif -+#ifdef AVX2_B4 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX2_BLOCK4,success ) -+#endif -+#ifdef AVX2_B2 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX2_BLOCK2,success ) -+#endif -+#ifdef GENERIC -+ call e%set("real_kernel", ELPA_2STAGE_REAL_GENERIC,success ) -+#endif -+#ifdef GENERIC_SIMPLE -+ call e%set("real_kernel", ELPA_2STAGE_REAL_GENERIC_SIMPLE,success ) -+#endif -+ -+ -+ ! Calculate eigenvalues/eigenvectors -+ -+ if (myid==0) then -+ print '(a)','| Entering two-step ELPA solver ... ' -+ print * -+ end if -+ -+ call mpi_barrier(mpi_comm_world, mpierr) ! for correct timings only -+ call e%eigenvectors(a, ev, z, success) -+ -+ if (myid==0) then -+ print '(a)','| Two-step ELPA solver complete.' -+ print * -+ end if -+ -+ call elpa_deallocate(e) -+ call elpa_uninit() -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+end -+ diff --git a/Golden_Repo/e/ESMF/ESMF-8.0.1-gpsmkl-2020.eb b/Golden_Repo/e/ESMF/ESMF-8.0.1-gpsmkl-2020.eb deleted file mode 100644 index 1960e35aa38d6136b397654f6d32f49fa3318f3d..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ESMF/ESMF-8.0.1-gpsmkl-2020.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ESMF' -version = '8.0.1' - -homepage = 'https://www.earthsystemcog.org/projects/esmf/' -description = """The Earth System Modeling Framework (ESMF) is software for building and coupling weather, - climate, and related models. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://github.com/esmf-org/esmf/archive/'] -sources = ['%%(name)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('netCDF-C++4', '4.3.1'), -] - -patches = [ - 'psmpi_lmpifort.patch', -] - -# ESMF ignores xxFLAGS -preconfigopts = 'export ESMF_BOPT="O" && ' -preconfigopts += 'export ESMF_OPTLEVEL="2" && ' -preinstallopts = 'export ESMF_CXXLINKOPTS="-lmpifort" && ' - -# too parallel causes the build to become really slow -maxparallel = 8 - -moduleclass = 'geo' diff --git a/Golden_Repo/e/ESMF/ESMF-8.0.1-gpsmkl-2021.eb b/Golden_Repo/e/ESMF/ESMF-8.0.1-gpsmkl-2021.eb deleted file mode 100644 index 316121f50cc022b0719d4b2512dded98e4cc59f3..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ESMF/ESMF-8.0.1-gpsmkl-2021.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ESMF' -version = '8.0.1' - -homepage = 'https://www.earthsystemcog.org/projects/esmf/' -description = """The Earth System Modeling Framework (ESMF) is software for building and coupling weather, - climate, and related models. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://github.com/esmf-org/esmf/archive/'] -sources = ['%%(name)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('netCDF-C++4', '4.3.1'), -] - -patches = [ - 'psmpi_lmpifort.patch', -] - -# ESMF ignores xxFLAGS -preconfigopts = 'export ESMF_BOPT="O" && ' -preconfigopts += 'export ESMF_OPTLEVEL="2" && ' -preinstallopts = 'export ESMF_CXXLINKOPTS="-lmpifort" && ' - -# too parallel causes the build to become really slow -maxparallel = 8 - -moduleclass = 'geo' diff --git a/Golden_Repo/e/ESMF/ESMF-8.0.1-intel-para-2020.eb b/Golden_Repo/e/ESMF/ESMF-8.0.1-intel-para-2020.eb deleted file mode 100644 index 4b0ba713438dece590338c669dc699870247c7ee..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ESMF/ESMF-8.0.1-intel-para-2020.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ESMF' -version = '8.0.1' - -homepage = 'https://www.earthsystemcog.org/projects/esmf/' -description = """The Earth System Modeling Framework (ESMF) is software for building and coupling weather, - climate, and related models. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://github.com/esmf-org/esmf/archive/'] -sources = ['%%(name)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('netCDF-C++4', '4.3.1'), -] - -patches = [ - 'psmpi_lmpifort.patch', -] - -# ESMF ignores xxFLAGS -preconfigopts = 'export ESMF_BOPT="O" && ' -preconfigopts += 'export ESMF_OPTLEVEL="2" && ' -preinstallopts = 'export ESMF_CXXLINKOPTS="-lmpifort" && ' - -# too parallel causes the build to become really slow -maxparallel = 8 - -moduleclass = 'geo' diff --git a/Golden_Repo/e/ESMF/ESMF-8.0.1-intel-para-2021.eb b/Golden_Repo/e/ESMF/ESMF-8.0.1-intel-para-2021.eb deleted file mode 100644 index 43a642f081e6891f97b96f602972abf5c804b72a..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ESMF/ESMF-8.0.1-intel-para-2021.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ESMF' -version = '8.0.1' - -homepage = 'https://www.earthsystemcog.org/projects/esmf/' -description = """The Earth System Modeling Framework (ESMF) is software for building and coupling weather, - climate, and related models. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://github.com/esmf-org/esmf/archive/'] -sources = ['%%(name)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('netCDF-C++4', '4.3.1'), -] - -patches = [ - 'psmpi_lmpifort.patch', -] - -# ESMF ignores xxFLAGS -preconfigopts = 'export ESMF_BOPT="O" && ' -preconfigopts += 'export ESMF_OPTLEVEL="2" && ' -preinstallopts = 'export ESMF_CXXLINKOPTS="-lmpifort" && ' - -# too parallel causes the build to become really slow -maxparallel = 8 - -moduleclass = 'geo' diff --git a/Golden_Repo/e/ESMF/psmpi_lmpifort.patch b/Golden_Repo/e/ESMF/psmpi_lmpifort.patch deleted file mode 100644 index 7038af50405750fe228e518565dad42ce61fb354..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ESMF/psmpi_lmpifort.patch +++ /dev/null @@ -1,10 +0,0 @@ ---- esmf/build_config/Linux.intel.default/build_rules.mk.orig 2016-04-25 17:31:54.667689000 +0200 -+++ esmf/build_config/Linux.intel.default/build_rules.mk 2016-04-25 17:32:22.991830000 +0200 -@@ -45,6 +45,7 @@ ifeq ($(ESMF_COMM),mpich2) - # Mpich2 --------------------------------------------------- - ESMF_F90DEFAULT = mpif90 - ESMF_CXXDEFAULT = mpicxx -+ESMF_CXXLINKLIBS += -lmpifort - ESMF_MPIRUNDEFAULT = mpirun $(ESMF_MPILAUNCHOPTIONS) - ESMF_MPIMPMDRUNDEFAULT = mpiexec $(ESMF_MPILAUNCHOPTIONS) - else diff --git a/Golden_Repo/e/Eigen/Eigen-3.3.7-GCCcore-10.3.0.eb b/Golden_Repo/e/Eigen/Eigen-3.3.7-GCCcore-10.3.0.eb deleted file mode 100644 index 5732bffc8f21cfd8e9fcf15e7945e64abb17aab1..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Eigen/Eigen-3.3.7-GCCcore-10.3.0.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'Eigen' -version = '3.3.7' - -homepage = 'http://eigen.tuxfamily.org' -description = """Eigen is a C++ template library for linear algebra: - matrices, vectors, numerical solvers, and related algorithms. -""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -# only includes header files, but requires CMake so using non-system toolchain -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://gitlab.com/libeigen/eigen/-/archive/%(version)s'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['685adf14bd8e9c015b78097c1dc22f2f01343756f196acdc76a678e1ae352e11'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -moduleclass = 'math' diff --git a/Golden_Repo/e/Eigen/Eigen-3.3.7-GCCcore-9.3.0.eb b/Golden_Repo/e/Eigen/Eigen-3.3.7-GCCcore-9.3.0.eb deleted file mode 100644 index f308b5de5bd9c81b90e855f9978a37fb83498633..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Eigen/Eigen-3.3.7-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'Eigen' -version = '3.3.7' - -homepage = 'http://eigen.tuxfamily.org' -description = """Eigen is a C++ template library for linear algebra: - matrices, vectors, numerical solvers, and related algorithms. 
-""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -# only includes header files, but requires CMake so using non-system toolchain -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://gitlab.com/libeigen/eigen/-/archive/%(version)s'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['685adf14bd8e9c015b78097c1dc22f2f01343756f196acdc76a678e1ae352e11'] - -# using CMake built with GCCcore to avoid relying on the system compiler to build it -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -moduleclass = 'math' diff --git a/Golden_Repo/e/Elemental/Elemental-0.87.7-gpsmkl-2020.eb b/Golden_Repo/e/Elemental/Elemental-0.87.7-gpsmkl-2020.eb deleted file mode 100644 index 83c7f25f9b2b9b66a619fe03ba38e34cb9d91b0f..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Elemental/Elemental-0.87.7-gpsmkl-2020.eb +++ /dev/null @@ -1,79 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> D.Alvarez <d.alvarez@fz-juelich.de> -# -# This work implements Elemental 0.87.7 -# -# https://computation.llnl.gov/casc/Elemental/main.html -# -## -easyblock = 'CMakeMake' - -name = 'Elemental' -version = '0.87.7' - -homepage = 'http://libelemental.org/download/' -description = """Elemental is an open-source library for distributed-memory dense and sparse-direct linear algebra and -and optimization which builds on top of BLAS, LAPACK, and MPI using modern C++ and additionally exposes interfaces to C -and Python. - -The hybrid version uses a combination of MPI and OpenMP. -Version 0.87.7 has been installed as module in $EBROOTELEMENTAL""" - -examples = """Examples can be found in $EBROOTELEMENTAL/examples""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -sources = [SOURCE_TGZ] -source_urls = ['http://libelemental.org/pub/releases'] - -patches = [ - 'elemental-0.87.7_rangehermitianeig.patch', - # See https://github.com/elemental/Elemental/pull/264 - 'Elemental-0.87.7_ambiguous_template.patch' -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('ParMETIS', '4.0.3'), -] - -separate_build_dir = 'True' - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' - -configopts = '-DEL_EXAMPLES=ON -DEL_TESTS=ON -DMATH_LIBS="$LIBLAPACK_MT" -DINSTALL_PYTHON_PACKAGE=OFF -DEL_HYBRID=ON ' -configopts += '-DPARMETIS_LIB_DIR=$EBROOTPARMETIS/lib -DPARMETIS_INCLUDE_DIR=$EBROOTPARMETIS/include ' - -postinstallcmds = [ - "cp -r %(builddir)s/%(name)s-%(version)s/examples %(installdir)s/examples", - "chmod 755 %(installdir)s/examples", - "chmod 755 %(installdir)s/examples/*", - "chmod 644 %(installdir)s/examples/blas_like/*", - "chmod 644 %(installdir)s/examples/core/*", - "chmod 644 %(installdir)s/examples/interface/*", - "chmod 644 %(installdir)s/examples/io/*", - "chmod 644 %(installdir)s/examples/lapack_like/*", - "chmod 644 %(installdir)s/examples/matrices/*", - "chmod 644 %(installdir)s/examples/optimization/*", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['include', 'lib64'], -} - -modextravars = { - 'ELEMENTAL_INCLUDE': '%(installdir)s/include', - 'ELEMENTAL_LIB': '%(installdir)s/lib', - 'ELEMENTAL_ROOT': '%(installdir)s', -} - -moduleclass = 'math' diff --git a/Golden_Repo/e/Elemental/Elemental-0.87.7-intel-2020.eb b/Golden_Repo/e/Elemental/Elemental-0.87.7-intel-2020.eb deleted file mode 100644 index 
de2043e69ccebf54cfca06be38d7e2c7f429aa1a..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Elemental/Elemental-0.87.7-intel-2020.eb +++ /dev/null @@ -1,77 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> D.Alvarez <d.alvarez@fz-juelich.de> -# -# This work implements Elemental 0.87.7 -# -# https://computation.llnl.gov/casc/Elemental/main.html -# -## -easyblock = 'CMakeMake' - -name = 'Elemental' -version = '0.87.7' - -homepage = 'http://libelemental.org/download/' -description = """Elemental is an open-source library for distributed-memory dense and sparse-direct linear algebra and -and optimization which builds on top of BLAS, LAPACK, and MPI using modern C++ and additionally exposes interfaces to C -and Python. - -The hybrid version uses a combination of MPI and OpenMP. -Version 0.87.7 has been installed as module in $EBROOTELEMENTAL""" - -examples = """Examples can be found in $EBROOTELEMENTAL/examples""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -sources = [SOURCE_TGZ] -source_urls = ['http://libelemental.org/pub/releases'] - -patches = [ - 'elemental-0.87.7_rangehermitianeig.patch' -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('ParMETIS', '4.0.3'), -] - -separate_build_dir = 'True' - -preconfigopts = 'export I_MPI_FABRICS=shm; ' - -configopts = '-DEL_EXAMPLES=ON -DEL_TESTS=ON -DMATH_LIBS="$LIBLAPACK_MT" -DINSTALL_PYTHON_PACKAGE=OFF -DEL_HYBRID=ON ' -configopts += '-DPARMETIS_LIB_DIR=$EBROOTPARMETIS/lib -DPARMETIS_INCLUDE_DIR=$EBROOTPARMETIS/include ' - -postinstallcmds = [ - "cp -r %(builddir)s/%(name)s-%(version)s/examples %(installdir)s/examples", - "chmod 755 %(installdir)s/examples", - "chmod 755 %(installdir)s/examples/*", - "chmod 644 %(installdir)s/examples/blas_like/*", - "chmod 644 %(installdir)s/examples/core/*", - "chmod 644 %(installdir)s/examples/interface/*", - "chmod 644 %(installdir)s/examples/io/*", - "chmod 644 %(installdir)s/examples/lapack_like/*", - "chmod 644 %(installdir)s/examples/matrices/*", - "chmod 644 %(installdir)s/examples/optimization/*", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['include', 'lib64'], -} - -modextravars = { - 'ELEMENTAL_INCLUDE': '%(installdir)s/include', - 'ELEMENTAL_LIB': '%(installdir)s/lib', - 'ELEMENTAL_ROOT': '%(installdir)s', -} - -moduleclass = 'math' diff --git a/Golden_Repo/e/Elemental/Elemental-0.87.7-intel-para-2020.eb b/Golden_Repo/e/Elemental/Elemental-0.87.7-intel-para-2020.eb deleted file mode 100644 index a32b0e6294f2e162a70ee89a79cbc5496089b49a..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Elemental/Elemental-0.87.7-intel-para-2020.eb +++ /dev/null @@ -1,75 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> D.Alvarez <d.alvarez@fz-juelich.de> -# -# This work implements Elemental 0.87.7 -# -# https://computation.llnl.gov/casc/Elemental/main.html -# -## -easyblock = 'CMakeMake' - -name = 'Elemental' -version = '0.87.7' - -homepage = 'http://libelemental.org/download/' -description = """Elemental is an open-source library for distributed-memory dense and sparse-direct linear algebra and -and optimization which builds on top of BLAS, LAPACK, and MPI using modern C++ and additionally exposes interfaces to C -and Python. 
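The Elemental easyconfigs repeat an identical block of chmod commands in postinstallcmds. A small sketch of an equivalent way to generate that list, shown only to make the structure of the repeated commands clearer (the expanded form in the deleted files behaves the same):

    # Sketch: generating the repeated chmod commands for the Elemental examples tree.
    # Equivalent to the hand-written postinstallcmds in the easyconfigs above.
    example_dirs = ['blas_like', 'core', 'interface', 'io',
                    'lapack_like', 'matrices', 'optimization']

    postinstallcmds = [
        "cp -r %(builddir)s/%(name)s-%(version)s/examples %(installdir)s/examples",
        "chmod 755 %(installdir)s/examples",
        "chmod 755 %(installdir)s/examples/*",
    ] + ["chmod 644 %%(installdir)s/examples/%s/*" % d for d in example_dirs]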
- -The hybrid version uses a combination of MPI and OpenMP. -Version 0.87.7 has been installed as module in $EBROOTELEMENTAL""" - -examples = """Examples can be found in $EBROOTELEMENTAL/examples""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -sources = [SOURCE_TGZ] -source_urls = ['http://libelemental.org/pub/releases'] - -patches = [ - 'elemental-0.87.7_rangehermitianeig.patch' -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('ParMETIS', '4.0.3'), -] - -separate_build_dir = 'True' - -configopts = '-DEL_EXAMPLES=ON -DEL_TESTS=ON -DMATH_LIBS="$LIBLAPACK_MT" -DINSTALL_PYTHON_PACKAGE=OFF -DEL_HYBRID=ON ' -configopts += '-DPARMETIS_LIB_DIR=$EBROOTPARMETIS/lib -DPARMETIS_INCLUDE_DIR=$EBROOTPARMETIS/include ' - -postinstallcmds = [ - "cp -r %(builddir)s/%(name)s-%(version)s/examples %(installdir)s/examples", - "chmod 755 %(installdir)s/examples", - "chmod 755 %(installdir)s/examples/*", - "chmod 644 %(installdir)s/examples/blas_like/*", - "chmod 644 %(installdir)s/examples/core/*", - "chmod 644 %(installdir)s/examples/interface/*", - "chmod 644 %(installdir)s/examples/io/*", - "chmod 644 %(installdir)s/examples/lapack_like/*", - "chmod 644 %(installdir)s/examples/matrices/*", - "chmod 644 %(installdir)s/examples/optimization/*", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['include', 'lib64'], -} - -modextravars = { - 'ELEMENTAL_INCLUDE': '%(installdir)s/include', - 'ELEMENTAL_LIB': '%(installdir)s/lib', - 'ELEMENTAL_ROOT': '%(installdir)s', -} - -moduleclass = 'math' diff --git a/Golden_Repo/e/Elemental/Elemental-0.87.7_ambiguous_template.patch b/Golden_Repo/e/Elemental/Elemental-0.87.7_ambiguous_template.patch deleted file mode 100644 index 563863fddce0ea3ec4738318734e243d71a4071c..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Elemental/Elemental-0.87.7_ambiguous_template.patch +++ /dev/null @@ -1,459 +0,0 @@ -diff --git a/include/El/blas_like/level1/decl.hpp b/include/El/blas_like/level1/decl.hpp -index e129c0968..7565b88b0 100644 ---- a/include/El/blas_like/level1/decl.hpp -+++ b/include/El/blas_like/level1/decl.hpp -@@ -487,7 +487,7 @@ void Contract( const BlockMatrix<T>& A, BlockMatrix<T>& B ); - template<typename T> - void Copy( const Matrix<T>& A, Matrix<T>& B ); - template<typename S,typename T, -- typename=EnableIf<CanCast<S,T>>> -+ typename=EnableIf<And< CanCast<S,T>, Not<IsSame<S,T>> >>> - void Copy( const Matrix<S>& A, Matrix<T>& B ); - - template<typename S,typename T, -@@ -501,7 +501,7 @@ void Copy( const BlockMatrix<S>& A, BlockMatrix<T>& B ); - template<typename T> - void Copy( const AbstractDistMatrix<T>& A, AbstractDistMatrix<T>& B ); - template<typename S,typename T, -- typename=EnableIf<CanCast<S,T>>> -+ typename=EnableIf<And< CanCast<S,T>, Not<IsSame<S,T>> >>> - void Copy( const AbstractDistMatrix<S>& A, AbstractDistMatrix<T>& B ); - - template<typename T> -@@ -532,7 +532,7 @@ template<typename T> - void Copy( const SparseMatrix<T>& A, SparseMatrix<T>& B ); - - template<typename S,typename T, -- typename=EnableIf<CanCast<S,T>>> -+ typename=EnableIf<And< CanCast<S,T>, Not<IsSame<S,T>> >> > - void Copy( const SparseMatrix<S>& A, SparseMatrix<T>& B ); - - template<typename S,typename T, -@@ -543,7 +543,7 @@ template<typename T> - void Copy( const DistSparseMatrix<T>& A, DistSparseMatrix<T>& B ); - - template<typename S,typename T, -- typename=EnableIf<CanCast<S,T>>> -+ 
typename=EnableIf<And<CanCast<S,T>,Not<IsSame<S,T>>>>> - void Copy( const DistSparseMatrix<S>& A, DistSparseMatrix<T>& B ); - - template<typename S,typename T, -@@ -559,7 +559,7 @@ template<typename T> - void Copy( const DistMultiVec<T>& A, DistMultiVec<T>& B ); - - template<typename S,typename T, -- typename=EnableIf<CanCast<S,T>>> -+ typename=EnableIf< And< CanCast<S,T>, Not<IsSame<S,T>> > >> - void Copy( const DistMultiVec<S>& A, DistMultiVec<T>& B ); - - template<typename T> -diff --git a/include/El/core.hpp b/include/El/core.hpp -index 02fa0e418..0f89acaed 100644 ---- a/include/El/core.hpp -+++ b/include/El/core.hpp -@@ -102,6 +102,16 @@ enum FortranLogicalEnum - template<typename S,typename T> - using IsSame = std::is_same<S,T>; - -+template<typename S, typename T> -+struct And { -+ static constexpr bool value = S::value && T::value; -+}; -+ -+template<typename T> -+struct Not { -+ static constexpr bool value = !T::value; -+}; -+ - template<typename Condition,class T=void> - using EnableIf = typename std::enable_if<Condition::value,T>::type; - template<typename Condition,class T=void> -diff --git a/src/core/imports/mpi.cpp b/src/core/imports/mpi.cpp -index 845caa34a..473f6f280 100644 ---- a/src/core/imports/mpi.cpp -+++ b/src/core/imports/mpi.cpp -@@ -2845,238 +2845,248 @@ EL_NO_RELEASE_EXCEPT - #endif - } - --#define MPI_PROTO(T) \ -+#define MPI_PROTO_BASE(T) \ - template bool Test( Request<T>& request ) EL_NO_RELEASE_EXCEPT; \ - template void Wait( Request<T>& request ) EL_NO_RELEASE_EXCEPT; \ - template void Wait( Request<T>& request, Status& status ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void WaitAll( int numRequests, Request<T>* requests ) \ -+ template void WaitAll<T>( int numRequests, Request<T>* requests ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void WaitAll \ -+ template void WaitAll<T> \ - ( int numRequests, Request<T>* requests, Status* statuses ) \ - EL_NO_RELEASE_EXCEPT; \ - template int GetCount<T>( Status& status ) EL_NO_RELEASE_EXCEPT; \ -- template void TaggedSend \ -+ template void TaggedSend<T> \ - ( const T* buf, int count, int to, int tag, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Send( const T* buf, int count, int to, Comm comm ) \ -+ template void Send<T>( const T* buf, int count, int to, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void TaggedSend( T b, int to, int tag, Comm comm ) \ -+ template void TaggedSend<T>( T b, int to, int tag, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Send( T b, int to, Comm comm ) \ -+ template void Send<T>( T b, int to, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void TaggedISend \ -- ( const T* buf, int count, int to, int tag, Comm comm, Request<T>& request ) \ -- EL_NO_RELEASE_EXCEPT; \ -- template void ISend \ -+ template void ISend<T> \ - ( const T* buf, int count, int to, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void TaggedISend \ -+ template void TaggedISend<T> \ - ( T buf, int to, int tag, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ISend( T buf, int to, Comm comm, Request<T>& request ) \ -+ template void ISend<T>( T buf, int to, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void TaggedISSend \ -+ template void TaggedISSend<T> \ - ( const T* buf, int count, int to, int tag, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ISSend \ -+ template void ISSend<T> \ - ( const T* buf, int count, int to, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- 
template void TaggedISSend \ -+ template void TaggedISSend<T> \ - ( T b, int to, int tag, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void TaggedRecv \ -+ template void TaggedRecv<T> \ - ( T* buf, int count, int from, int tag, Comm comm ) EL_NO_RELEASE_EXCEPT; \ -- template void Recv( T* buf, int count, int from, Comm comm ) \ -+ template void Recv<T>( T* buf, int count, int from, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ - template T TaggedRecv<T>( int from, int tag, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T Recv( int from, Comm comm ) EL_NO_RELEASE_EXCEPT; \ -- template void TaggedIRecv \ -+ template T Recv<T>( int from, Comm comm ) EL_NO_RELEASE_EXCEPT; \ -+ template void TaggedIRecv<T> \ - ( T* buf, int count, int from, int tag, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void IRecv \ -+ template void IRecv<T> \ - ( T* buf, int count, int from, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ - template T TaggedIRecv<T> \ - ( int from, int tag, Comm comm, Request<T>& request ) EL_NO_RELEASE_EXCEPT; \ - template T IRecv<T>( int from, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void TaggedSendRecv \ -+ template void TaggedSendRecv<T> \ - ( const T* sbuf, int sc, int to, int stag, \ - T* rbuf, int rc, int from, int rtag, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void SendRecv \ -+ template void SendRecv<T> \ - ( const T* sbuf, int sc, int to, \ - T* rbuf, int rc, int from, Comm comm ) EL_NO_RELEASE_EXCEPT; \ -- template T TaggedSendRecv \ -+ template T TaggedSendRecv<T> \ - ( T sb, int to, int stag, int from, int rtag, Comm comm ); \ - template T SendRecv( T sb, int to, int from, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void TaggedSendRecv \ -+ template void TaggedSendRecv<T> \ - ( T* buf, int count, int to, int stag, int from, int rtag, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void SendRecv \ -+ template void SendRecv<T> \ - ( T* buf, int count, int to, int from, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Broadcast( T* buf, int count, int root, Comm comm ) \ -+ template void Broadcast<T>( T& b, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Broadcast( T& b, int root, Comm comm ) \ -- EL_NO_RELEASE_EXCEPT; \ -- template void IBroadcast \ -+ template void IBroadcast<T> \ - ( T* buf, int count, int root, Comm comm, Request<T>& request ); \ -- template void IBroadcast \ -+ template void IBroadcast<T> \ - ( T& b, int root, Comm comm, Request<T>& request ); \ -- template void Gather \ -- ( const T* sbuf, int sc, T* rbuf, int rc, int root, Comm comm ) \ -- EL_NO_RELEASE_EXCEPT; \ -- template void IGather \ -+ template void IGather<T> \ - ( const T* sbuf, int sc, \ - T* rbuf, int rc, \ - int root, Comm comm, Request<T>& request ); \ -- template void Gather \ -- ( const T* sbuf, int sc, \ -- T* rbuf, const int* rcs, const int* rds, int root, Comm comm ) \ -+ template vector<T> AllToAll<T> \ -+ ( const vector<T>& sendBuf, \ -+ const vector<int>& sendCounts, \ -+ const vector<int>& sendOffs, \ -+ Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllGather( const T* sbuf, int sc, T* rbuf, int rc, Comm comm ) \ -+ template void Reduce<T> \ -+ ( const T* sbuf, T* rbuf, int count, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllGather \ -- ( const T* sbuf, int sc, \ -- T* rbuf, const int* rcs, const int* rds, Comm comm ) \ -+ template T Reduce<T>( T sb, Op op, int root, Comm comm ) \ - 
EL_NO_RELEASE_EXCEPT; \ -- template void Scatter \ -- ( const T* sbuf, int sc, \ -- T* rbuf, int rc, int root, Comm comm ) \ -+ template T Reduce<T>( T sb, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Scatter( T* buf, int sc, int rc, int root, Comm comm ) \ -+ template void Reduce<T>( T* buf, int count, Op op, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllToAll \ -- ( const T* sbuf, int sc, \ -- T* rbuf, int rc, Comm comm ) \ -+ template void Reduce<T>( T* buf, int count, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllToAll \ -- ( const T* sbuf, const int* scs, const int* sds, \ -- T* rbuf, const int* rcs, const int* rds, Comm comm ) \ -+ template void AllReduce<T>( const T* sbuf, T* rbuf, int count, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template vector<T> AllToAll \ -- ( const vector<T>& sendBuf, \ -- const vector<int>& sendCounts, \ -- const vector<int>& sendOffs, \ -- Comm comm ) \ -+ template T AllReduce<T>( T sb, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Reduce \ -- ( const T* sbuf, T* rbuf, int count, Op op, int root, Comm comm ) \ -+ template T AllReduce<T>( T sb, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Reduce \ -- ( const T* sbuf, T* rbuf, int count, int root, Comm comm ) \ -+ template void AllReduce<T>( T* buf, int count, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T Reduce( T sb, Op op, int root, Comm comm ) \ -+ template void ReduceScatter<T>( T* sbuf, T* rbuf, int rc, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T Reduce( T sb, int root, Comm comm ) \ -+ template void ReduceScatter<T>( T* sbuf, T* rbuf, int rc, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Reduce( T* buf, int count, Op op, int root, Comm comm ) \ -+ template T ReduceScatter<T>( T sb, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Reduce( T* buf, int count, int root, Comm comm ) \ -+ template T ReduceScatter<T>( T sb, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllReduce \ -- ( const T* sbuf, T* rbuf, int count, Op op, Comm comm ) \ -+ template void ReduceScatter<T>( T* buf, int rc, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllReduce( const T* sbuf, T* rbuf, int count, Comm comm ) \ -+ template void ReduceScatter<T>( T* buf, int rc, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T AllReduce( T sb, Op op, Comm comm ) \ -+ template void ReduceScatter<T> \ -+ ( const T* sbuf, T* rbuf, const int* rcs, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T AllReduce( T sb, Comm comm ) \ -+ template void ReduceScatter<T> \ -+ ( const T* sbuf, T* rbuf, const int* rcs, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllReduce( T* buf, int count, Op op, Comm comm ) \ -+ template void Scan<T>( const T* sbuf, T* rbuf, int count, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void AllReduce( T* buf, int count, Comm comm ) \ -+ template void Scan<T>( const T* sbuf, T* rbuf, int count, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ReduceScatter( T* sbuf, T* rbuf, int rc, Op op, Comm comm ) \ -+ template T Scan<T>( T sb, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ReduceScatter( T* sbuf, T* rbuf, int rc, Comm comm ) \ -+ template T Scan<T>( T sb, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T ReduceScatter( T sb, Op op, Comm comm ) \ -+ template void Scan<T>( T* buf, int count, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T ReduceScatter( T sb, Comm 
comm ) \ -+ template void Scan<T>( T* buf, int count, Comm comm ) \ -+ EL_NO_RELEASE_EXCEPT; -+ -+#define MPI_PROTO_DIFF(S,T) \ -+ template void TaggedISend<S> \ -+ ( const T* buf, int count, int to, int tag, Comm comm, Request<T>& request ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ReduceScatter( T* buf, int rc, Op op, Comm comm ) \ -+ template void Broadcast<S>( T* buf, int count, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ReduceScatter( T* buf, int rc, Comm comm ) \ -+ template void Gather<S> \ -+ ( const T* sbuf, int sc, T* rbuf, int rc, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ReduceScatter \ -- ( const T* sbuf, T* rbuf, const int* rcs, Op op, Comm comm ) \ -+ template void Gather<S> \ -+ ( const T* sbuf, int sc, \ -+ T* rbuf, const int* rcs, const int* rds, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void ReduceScatter \ -- ( const T* sbuf, T* rbuf, const int* rcs, Comm comm ) \ -+ template void AllGather<S>( const T* sbuf, int sc, T* rbuf, int rc, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Scan( const T* sbuf, T* rbuf, int count, Op op, Comm comm ) \ -+ template void AllGather<S> \ -+ ( const T* sbuf, int sc, \ -+ T* rbuf, const int* rcs, const int* rds, Comm comm ) \ -+ EL_NO_RELEASE_EXCEPT; \ -+ template void Scatter<S> \ -+ ( const T* sbuf, int sc, \ -+ T* rbuf, int rc, int root, Comm comm ) \ -+ EL_NO_RELEASE_EXCEPT; \ -+ template void Scatter<S>( T* buf, int sc, int rc, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Scan( const T* sbuf, T* rbuf, int count, Comm comm ) \ -+ template void AllToAll<S> \ -+ ( const T* sbuf, int sc, \ -+ T* rbuf, int rc, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T Scan( T sb, Op op, Comm comm ) \ -+ template void AllToAll<S> \ -+ ( const T* sbuf, const int* scs, const int* sds, \ -+ T* rbuf, const int* rcs, const int* rds, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template T Scan( T sb, Comm comm ) \ -+ template void Reduce<S> \ -+ ( const T* sbuf, T* rbuf, int count, Op op, int root, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Scan( T* buf, int count, Op op, Comm comm ) \ -+ template void AllReduce<S> \ -+ ( const T* sbuf, T* rbuf, int count, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; \ -- template void Scan( T* buf, int count, Comm comm ) \ -+ template void AllReduce<S>( T* buf, int count, Op op, Comm comm ) \ - EL_NO_RELEASE_EXCEPT; - --MPI_PROTO(byte) --MPI_PROTO(int) --MPI_PROTO(unsigned) --MPI_PROTO(long int) --MPI_PROTO(unsigned long) -+#define MPI_PROTO_REAL(T) \ -+ MPI_PROTO_BASE(T) \ -+ MPI_PROTO_DIFF(T, T) -+ -+#define MPI_PROTO_COMPLEX(T) \ -+ MPI_PROTO_BASE(Complex<T>) \ -+ MPI_PROTO_DIFF(T, Complex<T>) -+ -+MPI_PROTO_REAL(byte) -+MPI_PROTO_REAL(int) -+MPI_PROTO_REAL(unsigned) -+MPI_PROTO_REAL(long int) -+MPI_PROTO_REAL(unsigned long) - #ifdef EL_HAVE_MPI_LONG_LONG --MPI_PROTO(long long int) --MPI_PROTO(unsigned long long) -+MPI_PROTO_REAL(long long int) -+MPI_PROTO_REAL(unsigned long long) - #endif --MPI_PROTO(ValueInt<Int>) --MPI_PROTO(Entry<Int>) --MPI_PROTO(float) --MPI_PROTO(Complex<float>) --MPI_PROTO(ValueInt<float>) --MPI_PROTO(ValueInt<Complex<float>>) --MPI_PROTO(Entry<float>) --MPI_PROTO(Entry<Complex<float>>) --MPI_PROTO(double) --MPI_PROTO(Complex<double>) --MPI_PROTO(ValueInt<double>) --MPI_PROTO(ValueInt<Complex<double>>) --MPI_PROTO(Entry<double>) --MPI_PROTO(Entry<Complex<double>>) -+MPI_PROTO_REAL(ValueInt<Int>) -+MPI_PROTO_REAL(Entry<Int>) -+MPI_PROTO_REAL(float) -+MPI_PROTO_COMPLEX(float) 
-+MPI_PROTO_REAL(ValueInt<float>) -+MPI_PROTO_REAL(ValueInt<Complex<float>>) -+MPI_PROTO_REAL(Entry<float>) -+MPI_PROTO_REAL(Entry<Complex<float>>) -+MPI_PROTO_REAL(double) -+MPI_PROTO_COMPLEX(double) -+MPI_PROTO_REAL(ValueInt<double>) -+MPI_PROTO_REAL(ValueInt<Complex<double>>) -+MPI_PROTO_REAL(Entry<double>) -+MPI_PROTO_REAL(Entry<Complex<double>>) - #ifdef EL_HAVE_QD --MPI_PROTO(DoubleDouble) --MPI_PROTO(QuadDouble) --MPI_PROTO(Complex<DoubleDouble>) --MPI_PROTO(Complex<QuadDouble>) --MPI_PROTO(ValueInt<DoubleDouble>) --MPI_PROTO(ValueInt<QuadDouble>) --MPI_PROTO(ValueInt<Complex<DoubleDouble>>) --MPI_PROTO(ValueInt<Complex<QuadDouble>>) --MPI_PROTO(Entry<DoubleDouble>) --MPI_PROTO(Entry<QuadDouble>) --MPI_PROTO(Entry<Complex<DoubleDouble>>) --MPI_PROTO(Entry<Complex<QuadDouble>>) -+MPI_PROTO_REAL(DoubleDouble) -+MPI_PROTO_REAL(QuadDouble) -+MPI_PROTO_COMPLEX(DoubleDouble) -+MPI_PROTO_COMPLEX(QuadDouble) -+MPI_PROTO_REAL(ValueInt<DoubleDouble>) -+MPI_PROTO_REAL(ValueInt<QuadDouble>) -+MPI_PROTO_REAL(ValueInt<Complex<DoubleDouble>>) -+MPI_PROTO_REAL(ValueInt<Complex<QuadDouble>>) -+MPI_PROTO_REAL(Entry<DoubleDouble>) -+MPI_PROTO_REAL(Entry<QuadDouble>) -+MPI_PROTO_REAL(Entry<Complex<DoubleDouble>>) -+MPI_PROTO_REAL(Entry<Complex<QuadDouble>>) - #endif - #ifdef EL_HAVE_QUAD --MPI_PROTO(Quad) --MPI_PROTO(Complex<Quad>) --MPI_PROTO(ValueInt<Quad>) --MPI_PROTO(ValueInt<Complex<Quad>>) --MPI_PROTO(Entry<Quad>) --MPI_PROTO(Entry<Complex<Quad>>) -+MPI_PROTO_REAL(Quad) -+MPI_PROTO_COMPLEX(Quad) -+MPI_PROTO_REAL(ValueInt<Quad>) -+MPI_PROTO_REAL(ValueInt<Complex<Quad>>) -+MPI_PROTO_REAL(Entry<Quad>) -+MPI_PROTO_REAL(Entry<Complex<Quad>>) - #endif - #ifdef EL_HAVE_MPC --MPI_PROTO(BigInt) --MPI_PROTO(BigFloat) --MPI_PROTO(Complex<BigFloat>) --MPI_PROTO(ValueInt<BigInt>) --MPI_PROTO(ValueInt<BigFloat>) --MPI_PROTO(ValueInt<Complex<BigFloat>>) --MPI_PROTO(Entry<BigInt>) --MPI_PROTO(Entry<BigFloat>) --MPI_PROTO(Entry<Complex<BigFloat>>) -+MPI_PROTO_REAL(BigInt) -+MPI_PROTO_REAL(BigFloat) -+MPI_PROTO_REAL(Complex<BigFloat>) -+MPI_PROTO_REAL(ValueInt<BigInt>) -+MPI_PROTO_REAL(ValueInt<BigFloat>) -+MPI_PROTO_REAL(ValueInt<Complex<BigFloat>>) -+MPI_PROTO_REAL(Entry<BigInt>) -+MPI_PROTO_REAL(Entry<BigFloat>) -+MPI_PROTO_REAL(Entry<Complex<BigFloat>>) - #endif - - #define PROTO(T) \ diff --git a/Golden_Repo/e/Elemental/elemental-0.87.7_rangehermitianeig.patch b/Golden_Repo/e/Elemental/elemental-0.87.7_rangehermitianeig.patch deleted file mode 100644 index f94ee5c6fa22f8c139dad98601e2f6f5fec5f7d4..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Elemental/elemental-0.87.7_rangehermitianeig.patch +++ /dev/null @@ -1,921 +0,0 @@ ---- Elemental-0.87.7/src/lapack_like/spectral/BidiagSVD.cpp 2017-02-07 02:23:30.000000000 +0100 -+++ Elemental-0.87.7_ok/src/lapack_like/spectral/BidiagSVD.cpp 2017-04-21 11:00:49.566845000 +0200 -@@ -1,5 +1,5 @@ - /* -- Copyright (c) 2009-2016, Jack Poulson -+ Copyright (c) 2009-2017, Jack Poulson - All rights reserved. - - This file is part of Elemental and is under the BSD 2-Clause License, ---- Elemental-0.87.7/src/lapack_like/spectral/HermitianEig.cpp 2017-02-07 02:23:30.000000000 +0100 -+++ Elemental-0.87.7_ok/src/lapack_like/spectral/HermitianEig.cpp 2017-04-21 11:01:19.743963000 +0200 -@@ -1,9 +1,9 @@ - /* -- Copyright (c) 2009-2016, Jack Poulson -+ Copyright (c) 2009-2017, Jack Poulson - All rights reserved. 
- -- This file is part of Elemental and is under the BSD 2-Clause License, -- which can be found in the LICENSE file in the root directory, or at -+ This file is part of Elemental and is under the BSD 2-Clause License, -+ which can be found in the LICENSE file in the root directory, or at - http://opensource.org/licenses/BSD-2-Clause - */ - #include <El.hpp> -@@ -78,8 +78,8 @@ - - namespace herm_eig { - --// We create specialized redistribution routines for redistributing the --// real eigenvectors of the symmetric tridiagonal matrix at the core of our -+// We create specialized redistribution routines for redistributing the -+// real eigenvectors of the symmetric tridiagonal matrix at the core of our - // eigensolver in order to minimize the temporary memory usage. - template<typename F> - void InPlaceRedist( DistMatrix<F>& Q, Int rowAlign, const Base<F>* readBuffer ) -@@ -101,7 +101,7 @@ - const Int maxHeight = MaxLength(height,r); - const Int maxWidth = MaxLength(width,p); - const Int portionSize = mpi::Pad( maxHeight*maxWidth ); -- -+ - // Allocate our send/recv buffers - vector<Real> buffer(2*r*portionSize); - Real* sendBuffer = &buffer[0]; -@@ -119,7 +119,7 @@ - EL_INNER_PARALLEL_FOR_COLLAPSE2 - for( Int j=0; j<localWidth; ++j ) - for( Int i=0; i<thisLocalHeight; ++i ) -- data[i+j*thisLocalHeight] = -+ data[i+j*thisLocalHeight] = - readBuffer[thisColShift+i*r+j*height]; - } - -@@ -172,7 +172,7 @@ - HermitianEigInfo - BlackBox - ( UpperOrLower uplo, -- Matrix<F>& A, -+ Matrix<F>& A, - Matrix<Base<F>>& w, - const HermitianEigCtrl<F>& ctrl ) - { -@@ -205,13 +205,13 @@ - const Int n = A.Height(); - Int numValid = n; - if( subset.indexSubset ) -- { -+ { - numValid = subset.upperIndex-subset.lowerIndex+1; - } - else if( subset.rangeSubset ) - { - if( subset.lowerBound >= Real(0) || subset.upperBound < Real(0) ) -- { -+ { - numValid = 0; - } - } -@@ -256,7 +256,7 @@ - HermitianEigInfo - HermitianEig - ( UpperOrLower uplo, -- Matrix<F>& A, -+ Matrix<F>& A, - Matrix<Base<F>>& w, - const HermitianEigCtrl<F>& ctrl ) - { -@@ -279,7 +279,7 @@ - HermitianEigInfo - SequentialHelper - ( UpperOrLower uplo, -- AbstractDistMatrix<F>& A, -+ AbstractDistMatrix<F>& A, - AbstractDistMatrix<Base<F>>& w, - const HermitianEigCtrl<F>& ctrl ) - { -@@ -314,7 +314,8 @@ - - - #ifdef EL_HAVE_SCALAPACK --template<typename F,typename=EnableIf<IsBlasScalar<Base<F>>>> -+template<typename F, -+ typename=EnableIf<IsBlasScalar<Base<F>>>> - HermitianEigInfo - ScaLAPACKHelper - ( UpperOrLower uplo, -@@ -367,7 +368,9 @@ - return info; - } - --template<typename F,typename=DisableIf<IsBlasScalar<Base<F>>>,typename=void> -+template<typename F, -+ typename=DisableIf<IsBlasScalar<Base<F>>>, -+ typename=void> - HermitianEigInfo - ScaLAPACKHelper - ( UpperOrLower uplo, -@@ -426,7 +429,7 @@ - else if( subset.rangeSubset ) - { - if( subset.lowerBound >= Real(0) || subset.upperBound < Real(0) ) -- { -+ { - numValid = 0; - } - } -@@ -451,7 +454,7 @@ - if( A.Grid().Rank() == 0 ) - timer.Start(); - } -- -+ - // Tridiagonalize A - herm_tridiag::ExplicitCondensed( uplo, A, ctrl.tridiagCtrl ); - -@@ -540,7 +543,7 @@ - ( UpperOrLower uplo, - Matrix<F>& A, - Matrix<Base<F>>& w, -- Matrix<F>& Q, -+ Matrix<F>& Q, - const HermitianEigCtrl<F>& ctrl ) - { - EL_DEBUG_CSE -@@ -566,14 +569,14 @@ - ( UpperOrLower uplo, - AbstractDistMatrix<F>& APre, - AbstractDistMatrix<Base<F>>& w, -- AbstractDistMatrix<F>& QPre, -+ AbstractDistMatrix<F>& QPre, - const HermitianEigCtrl<F>& ctrl ) - { - EL_DEBUG_CSE - const Grid& g = APre.Grid(); - HermitianEigInfo 
info; - -- DistMatrixReadProxy<F,F,MC,MR> AProx( APre ); -+ DistMatrixReadProxy<F,F,MC,MR> AProx( APre ); - auto& A = AProx.Get(); - - // TODO(poulson): Extend interface to support ctrl.tridiagCtrl -@@ -613,7 +616,7 @@ - ( UpperOrLower uplo, - Matrix<F>& A, - Matrix<Base<F>>& w, -- Matrix<F>& Q, -+ Matrix<F>& Q, - const HermitianEigCtrl<F>& ctrl ) - { - EL_DEBUG_CSE -@@ -648,13 +651,13 @@ - const Int n = A.Height(); - Int numValid = n; - if( subset.indexSubset ) -- { -+ { - numValid = subset.upperIndex-subset.lowerIndex+1; - } - else if( subset.rangeSubset ) - { - if( subset.lowerBound >= Real(0) || subset.upperBound < Real(0) ) -- { -+ { - numValid = 0; - } - } -@@ -701,9 +704,9 @@ - HermitianEigInfo - SequentialHelper - ( UpperOrLower uplo, -- AbstractDistMatrix<F>& A, -+ AbstractDistMatrix<F>& A, - AbstractDistMatrix<Base<F>>& w, -- AbstractDistMatrix<F>& Q, -+ AbstractDistMatrix<F>& Q, - const HermitianEigCtrl<F>& ctrl ) - { - EL_DEBUG_CSE -@@ -728,7 +731,7 @@ - Matrix<Base<F>> wProx; - Matrix<F> QProx; - wProx.Resize( n, 1 ); -- QProx.Resize( n, n ); -+ QProx.Resize( n, n ); - - info = HermitianEig( uplo, A.Matrix(), wProx, QProx, ctrl ); - -@@ -749,7 +752,8 @@ - } - - #ifdef EL_HAVE_SCALAPACK --template<typename F,typename=EnableIf<IsBlasScalar<Base<F>>>> -+template<typename F, -+ typename=EnableIf<IsBlasScalar<Base<F>>>> - HermitianEigInfo - ScaLAPACKHelper - ( UpperOrLower uplo, -@@ -816,7 +820,9 @@ - return info; - } - --template<typename F,typename=DisableIf<IsBlasScalar<Base<F>>>,typename=void> -+template<typename F, -+ typename=DisableIf<IsBlasScalar<Base<F>>>, -+ typename=void> - HermitianEigInfo - ScaLAPACKHelper - ( UpperOrLower uplo, -@@ -890,18 +896,18 @@ - else - kEst = n; - -- // We will use the same buffer for Q in the vector distribution used by -- // PMRRR as for the matrix distribution used by Elemental. In order to -+ // We will use the same buffer for Q in the vector distribution used by -+ // PMRRR as for the matrix distribution used by Elemental. In order to - // do so, we must pad Q's dimensions slightly. - const Int N = MaxLength(n,g.Height())*g.Height(); -- const Int K = MaxLength(kEst,g.Size())*g.Size(); -+ const Int K = MaxLength(kEst,g.Size())*g.Size(); - - ElementalProxyCtrl proxCtrl; - proxCtrl.colConstrain = true; - proxCtrl.rowConstrain = true; - proxCtrl.colAlign = 0; - proxCtrl.rowAlign = 0; -- -+ - DistMatrixWriteProxy<F,F,MC,MR> QProx( QPre, proxCtrl ); - auto& Q = QProx.Get(); - -@@ -941,14 +947,14 @@ - - const Int k = w.Height(); - { -- // Redistribute Q piece-by-piece in place. This is to keep the -+ // Redistribute Q piece-by-piece in place. This is to keep the - // send/recv buffer memory usage low. - const Int p = g.Size(); - const Int numEqualPanels = K/p; - const Int numPanelsPerComm = (numEqualPanels / TARGET_CHUNKS) + 1; - const Int nbProp = numPanelsPerComm*p; - -- // Manually maintain information about the implicit Q[* ,VR] stored -+ // Manually maintain information about the implicit Q[* ,VR] stored - // at the end of the Q[MC,MR] buffers. 
- Int alignment = 0; - const Real* readBuffer = Q_STAR_VR.LockedBuffer(); -@@ -1104,7 +1110,7 @@ - } - - auto sortPairs = TaggedSort( w, ctrl.tridiagEigCtrl.sort ); -- for( Int j=0; j<n; ++j ) -+ for( Int j=0; j<w.Height(); ++j ) - w.Set( j, 0, sortPairs[j].value ); - ApplyTaggedSortToEachRow( sortPairs, Q ); - ---- Elemental-0.87.7/src/lapack_like/spectral/HermitianTridiagEig.cpp 2017-02-07 02:23:30.000000000 +0100 -+++ Elemental-0.87.7_ok/src/lapack_like/spectral/HermitianTridiagEig.cpp 2017-04-21 11:01:34.618650000 +0200 -@@ -1,9 +1,9 @@ - /* -- Copyright (c) 2009-2016, Jack Poulson -+ Copyright (c) 2009-2017, Jack Poulson - All rights reserved. - -- This file is part of Elemental and is under the BSD 2-Clause License, -- which can be found in the LICENSE file in the root directory, or at -+ This file is part of Elemental and is under the BSD 2-Clause License, -+ which can be found in the LICENSE file in the root directory, or at - http://opensource.org/licenses/BSD-2-Clause - */ - #include <El.hpp> -@@ -315,7 +315,7 @@ - { - EL_DEBUG_CSE - HermitianTridiagEigInfo info; -- w = d; -+ w = d; - info.qrInfo = QRAlg( w, dSub, ctrl ); - herm_eig::SortAndFilter( w, ctrl ); - return info; -@@ -332,6 +332,8 @@ - EL_DEBUG_CSE - HermitianTridiagEigInfo info; - auto ctrlMod( ctrl ); -+ ctrlMod.subset.indexSubset = false; -+ ctrlMod.subset.rangeSubset = false; - ctrlMod.wantEigVecs = false; - Matrix<Real> Q; - info.dcInfo = DivideAndConquer( d, dSub, w, Q, ctrlMod ); -@@ -339,7 +341,8 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - LAPACKHelper - ( Matrix<Real>& d, -@@ -355,7 +358,7 @@ - if( ctrl.subset.rangeSubset ) - { - const Int k = lapack::SymmetricTridiagEig -- ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), -+ ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), - ctrl.subset.lowerBound, ctrl.subset.upperBound ); - w.Resize( k, 1 ); - } -@@ -363,7 +366,7 @@ - { - const Int numEig = ctrl.subset.upperIndex-ctrl.subset.lowerIndex+1; - lapack::SymmetricTridiagEig -- ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), -+ ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), - BlasInt(ctrl.subset.lowerIndex), - BlasInt(ctrl.subset.upperIndex) ); - w.Resize( numEig, 1 ); -@@ -376,7 +379,8 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - Helper - ( const Matrix<Real>& d, -@@ -403,7 +407,9 @@ - return LAPACKHelper( dMod, dSubMod, w, ctrl ); - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - HermitianTridiagEigInfo - Helper - ( const Matrix<Real>& d, -@@ -462,7 +468,7 @@ - QRHelper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -- AbstractDistMatrix<Real>& w, -+ AbstractDistMatrix<Real>& w, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -515,6 +521,8 @@ - auto& w = wProx.Get(); - - auto ctrlMod( ctrl ); -+ ctrlMod.subset.indexSubset = false; -+ ctrlMod.subset.rangeSubset = false; - ctrlMod.wantEigVecs = false; - DistMatrix<Real> Q(w.Grid()); - info.dcInfo = -@@ -546,6 +554,8 @@ - auto& w = wProx.Get(); - - auto ctrlMod( ctrl ); -+ ctrlMod.subset.indexSubset = false; -+ ctrlMod.subset.rangeSubset = false; - ctrlMod.wantEigVecs = false; - DistMatrix<Real> 
Q(w.Grid()); - info.dcInfo = -@@ -555,7 +565,8 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - MRRRHelper - ( const AbstractDistMatrix<Real>& d, -@@ -586,17 +597,17 @@ - herm_tridiag_eig::Info rangeInfo; - if( ctrl.subset.rangeSubset ) - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -- wVector.data(), w.ColComm(), -+ ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -+ wVector.data(), w.ColComm(), - ctrl.subset.lowerBound, ctrl.subset.upperBound ); - else if( ctrl.subset.indexSubset ) - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -- wVector.data(), w.ColComm(), -+ ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -+ wVector.data(), w.ColComm(), - int(ctrl.subset.lowerIndex), int(ctrl.subset.upperIndex) ); - else - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -+ ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), - wVector.data(), w.ColComm() ); - w.Resize( rangeInfo.numGlobalEigenvalues, 1 ); - for( Int iLoc=0; iLoc<w.LocalHeight(); ++iLoc ) -@@ -606,7 +617,8 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real>& d, -@@ -629,12 +641,14 @@ - } - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -- AbstractDistMatrix<Real>& w, -+ AbstractDistMatrix<Real>& w, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -648,12 +662,13 @@ - } - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - MRRRHelper - ( const AbstractDistMatrix<Real >& d, - const AbstractDistMatrix<Complex<Real>>& dSub, -- AbstractDistMatrix<Real >& wPre, -+ AbstractDistMatrix<Real >& wPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -711,12 +726,13 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real >& d, - const AbstractDistMatrix<Complex<Real>>& dSub, -- AbstractDistMatrix<Real >& wPre, -+ AbstractDistMatrix<Real >& wPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -734,12 +750,14 @@ - } - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real >& d, - const AbstractDistMatrix<Complex<Real>>& dSub, -- AbstractDistMatrix<Real >& w, -+ AbstractDistMatrix<Real >& w, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -760,7 +778,7 @@ - HermitianTridiagEig - ( const AbstractDistMatrix<Base<F>>& d, - const AbstractDistMatrix<F >& dSub, -- AbstractDistMatrix<Base<F>>& w, -+ AbstractDistMatrix<Base<F>>& w, - const HermitianTridiagEigCtrl<Base<F>>& ctrl ) - { - EL_DEBUG_CSE -@@ -783,7 +801,7 @@ - { 
- EL_DEBUG_CSE - HermitianTridiagEigInfo info; -- w = d; -+ w = d; - info.qrInfo = QRAlg( w, dSub, Q, ctrl ); - herm_eig::SortAndFilter( w, Q, ctrl ); - return info; -@@ -806,13 +824,17 @@ - } - else - { -- info.dcInfo = DivideAndConquer( d, dSub, w, Q, ctrl ); -+ auto ctrlMod( ctrl ); -+ ctrlMod.subset.indexSubset = false; -+ ctrlMod.subset.rangeSubset = false; -+ info.dcInfo = DivideAndConquer( d, dSub, w, Q, ctrlMod ); - herm_eig::SortAndFilter( w, Q, ctrl ); - } - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - LAPACKHelper - ( Matrix<Real>& d, -@@ -831,7 +853,7 @@ - { - Q.Resize( n, n ); - const Int k = lapack::SymmetricTridiagEig -- ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), -+ ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), - Q.Buffer(), BlasInt(Q.LDim()), - ctrl.subset.lowerBound, ctrl.subset.upperBound ); - w.Resize( k, 1 ); -@@ -842,7 +864,7 @@ - const Int numEig = ctrl.subset.upperIndex-ctrl.subset.lowerIndex+1; - Q.Resize( n, numEig ); - lapack::SymmetricTridiagEig -- ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), -+ ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), - Q.Buffer(), BlasInt(Q.LDim()), - BlasInt(ctrl.subset.lowerIndex), - BlasInt(ctrl.subset.upperIndex) ); -@@ -852,7 +874,7 @@ - { - Q.Resize( n, n ); - lapack::SymmetricTridiagEig -- ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), -+ ( BlasInt(n), d.Buffer(), dSub.Buffer(), w.Buffer(), - Q.Buffer(), BlasInt(Q.LDim()) ); - } - auto sortPairs = TaggedSort( w, ctrl.sort ); -@@ -863,7 +885,8 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - Helper - ( const Matrix<Real>& d, -@@ -894,7 +917,9 @@ - } - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - HermitianTridiagEigInfo - Helper - ( const Matrix<Real>& d, -@@ -925,7 +950,7 @@ - Helper - ( const Matrix<Real>& d, - const Matrix<Complex<Real>>& dSub, -- Matrix<Real>& w, -+ Matrix<Real>& w, - Matrix<Complex<Real>>& Q, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { -@@ -953,7 +978,7 @@ - ( const Matrix<Base<F>>& d, - const Matrix<F>& dSub, - Matrix<Base<F>>& w, -- Matrix<F>& Q, -+ Matrix<F>& Q, - const HermitianTridiagEigCtrl<Base<F>>& ctrl ) - { - EL_DEBUG_CSE -@@ -967,8 +992,8 @@ - QRHelper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -- AbstractDistMatrix<Real>& w, -- AbstractDistMatrix<Real>& QPre, -+ AbstractDistMatrix<Real>& w, -+ AbstractDistMatrix<Real>& QPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1000,8 +1025,8 @@ - QRHelper - ( const AbstractDistMatrix<Real >& d, - const AbstractDistMatrix<Complex<Real>>& dSub, -- AbstractDistMatrix<Real >& w, -- AbstractDistMatrix<Complex<Real>>& QPre, -+ AbstractDistMatrix<Real >& w, -+ AbstractDistMatrix<Complex<Real>>& QPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1048,8 +1073,8 @@ - DCHelper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -- AbstractDistMatrix<Real>& wPre, -- AbstractDistMatrix<Real>& QPre, -+ AbstractDistMatrix<Real>& wPre, -+ AbstractDistMatrix<Real>& QPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1066,9 +1091,12 @@ - auto& w = wProx.Get(); 
- DistMatrixWriteProxy<Real,Real,MC,MR> QProx( QPre ); - auto& Q = QProx.Get(); -+ auto ctrlMod( ctrl ); -+ ctrlMod.subset.indexSubset = false; -+ ctrlMod.subset.rangeSubset = false; - info.dcInfo = - DivideAndConquer -- ( d_STAR_STAR.Matrix(), dSub_STAR_STAR.Matrix(), w, Q, ctrl ); -+ ( d_STAR_STAR.Matrix(), dSub_STAR_STAR.Matrix(), w, Q, ctrlMod ); - herm_eig::SortAndFilter( w, Q, ctrl ); - } - -@@ -1107,9 +1135,12 @@ - DistMatrixWriteProxy<Real,Real,STAR,STAR> wProx( wPre ); - auto& w = wProx.Get(); - -+ auto ctrlMod( ctrl ); -+ ctrlMod.subset.indexSubset = false; -+ ctrlMod.subset.rangeSubset = false; - info.dcInfo = - DivideAndConquer -- ( d_STAR_STAR.Matrix(), dSubReal.Matrix(), w, QReal, ctrl ); -+ ( d_STAR_STAR.Matrix(), dSubReal.Matrix(), w, QReal, ctrlMod ); - herm_eig::SortAndFilter( w, QReal, ctrl ); - - Copy( QReal, Q ); -@@ -1119,13 +1150,14 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - MRRRHelper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -- AbstractDistMatrix<Real>& wPre, -- AbstractDistMatrix<Real>& QPre, -+ AbstractDistMatrix<Real>& wPre, -+ AbstractDistMatrix<Real>& QPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1174,17 +1206,17 @@ - vector<double> wVector(n); - if( ctrl.subset.rangeSubset ) - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -+ ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), - wVector.data(), Q.Buffer(), Q.LDim(), w.ColComm(), - ctrl.subset.lowerBound, ctrl.subset.upperBound ); - else if( ctrl.subset.indexSubset ) - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -+ ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), - wVector.data(), Q.Buffer(), Q.LDim(), w.ColComm(), - int(ctrl.subset.lowerIndex), int(ctrl.subset.upperIndex) ); - else - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), -+ ( int(n), d_STAR_STAR.Buffer(), dSub_STAR_STAR.Buffer(), - wVector.data(), Q.Buffer(), Q.LDim(), w.ColComm() ); - w.Resize( rangeInfo.numGlobalEigenvalues, 1 ); - Q.Resize( n, rangeInfo.numGlobalEigenvalues ); -@@ -1199,13 +1231,14 @@ - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - MRRRHelper - ( const AbstractDistMatrix<Real >& d, - const AbstractDistMatrix<Complex<Real>>& dSub, -- AbstractDistMatrix<Real >& wPre, -- AbstractDistMatrix<Complex<Real>>& QPre, -+ AbstractDistMatrix<Real >& wPre, -+ AbstractDistMatrix<Complex<Real>>& QPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1261,17 +1294,17 @@ - vector<double> wVector(n); - if( ctrl.subset.rangeSubset ) - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSubReal.Buffer(), -+ ( int(n), d_STAR_STAR.Buffer(), dSubReal.Buffer(), - wVector.data(), QReal.Buffer(), QReal.LDim(), w.ColComm(), - ctrl.subset.lowerBound, ctrl.subset.upperBound ); - else if( ctrl.subset.indexSubset ) - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), d_STAR_STAR.Buffer(), dSubReal.Buffer(), -+ ( int(n), d_STAR_STAR.Buffer(), dSubReal.Buffer(), - wVector.data(), QReal.Buffer(), QReal.LDim(), w.ColComm(), - int(ctrl.subset.lowerIndex), int(ctrl.subset.upperIndex) ); - else - rangeInfo = herm_tridiag_eig::Eig -- ( int(n), 
d_STAR_STAR.Buffer(), dSubReal.Buffer(), -+ ( int(n), d_STAR_STAR.Buffer(), dSubReal.Buffer(), - wVector.data(), QReal.Buffer(), QReal.LDim(), w.ColComm() ); - - w.Resize( rangeInfo.numGlobalEigenvalues, 1 ); -@@ -1287,18 +1320,19 @@ - ApplyTaggedSortToEachRow( sortPairs, QReal ); - - Copy( QReal, Q ); -- DiagonalScale( LEFT, NORMAL, phase, Q ); -+ DiagonalScale( LEFT, NORMAL, phase, Q ); - - return info; - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -- AbstractDistMatrix<Real>& w, -- AbstractDistMatrix<Real>& Q, -+ AbstractDistMatrix<Real>& w, -+ AbstractDistMatrix<Real>& Q, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1316,13 +1350,15 @@ - } - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -- AbstractDistMatrix<Real>& w, -- AbstractDistMatrix<Real>& Q, -+ AbstractDistMatrix<Real>& w, -+ AbstractDistMatrix<Real>& Q, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1336,13 +1372,14 @@ - } - } - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real >& d, - const AbstractDistMatrix<Complex<Real>>& dSub, -- AbstractDistMatrix<Real >& w, -- AbstractDistMatrix<Complex<Real>>& Q, -+ AbstractDistMatrix<Real >& w, -+ AbstractDistMatrix<Complex<Real>>& Q, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1360,13 +1397,15 @@ - } - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - HermitianTridiagEigInfo - Helper - ( const AbstractDistMatrix<Real >& d, - const AbstractDistMatrix<Complex<Real>>& dSub, -- AbstractDistMatrix<Real >& w, -- AbstractDistMatrix<Complex<Real>>& QPre, -+ AbstractDistMatrix<Real >& w, -+ AbstractDistMatrix<Complex<Real>>& QPre, - const HermitianTridiagEigCtrl<Real>& ctrl ) - { - EL_DEBUG_CSE -@@ -1388,7 +1427,7 @@ - ( const AbstractDistMatrix<Base<F>>& d, - const AbstractDistMatrix<F>& dSub, - AbstractDistMatrix<Base<F>>& w, -- AbstractDistMatrix<F>& Q, -+ AbstractDistMatrix<F>& Q, - const HermitianTridiagEigCtrl<Base<F>>& ctrl ) - { - EL_DEBUG_CSE -@@ -1397,7 +1436,8 @@ - - namespace herm_tridiag_eig { - --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - Int MRRREstimateHelper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -@@ -1421,7 +1461,9 @@ - return estimate.numGlobalEigenvalues; - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - Int MRRREstimateHelper - ( const AbstractDistMatrix<Real>& d, - const AbstractDistMatrix<Real>& dSub, -@@ -1435,7 +1477,8 @@ - } - - // Q is assumed to be sufficiently large and properly aligned --template<typename Real,typename=EnableIf<IsBlasScalar<Real>>> -+template<typename Real, -+ typename=EnableIf<IsBlasScalar<Real>>> - 
HermitianTridiagEigInfo - MRRRPostEstimateHelper - ( const AbstractDistMatrix<Real>& d, -@@ -1488,7 +1531,9 @@ - return info; - } - --template<typename Real,typename=DisableIf<IsBlasScalar<Real>>,typename=void> -+template<typename Real, -+ typename=DisableIf<IsBlasScalar<Real>>, -+ typename=void> - HermitianTridiagEigInfo - MRRRPostEstimateHelper - ( const AbstractDistMatrix<Real>& d, ---- Elemental-0.87.7/src/lapack_like/spectral/HermitianTridiagEig/DivideAndConquer.hpp 2017-02-07 02:23:30.000000000 +0100 -+++ Elemental-0.87.7_ok/src/lapack_like/spectral/HermitianTridiagEig/DivideAndConquer.hpp 2017-04-21 11:02:22.872284000 +0200 -@@ -1,5 +1,5 @@ - /* -- Copyright (c) 2009-2016, Jack Poulson -+ Copyright (c) 2009-2017, Jack Poulson - All rights reserved. - - This file is part of Elemental and is under the BSD 2-Clause License, -@@ -1126,10 +1126,13 @@ - EL_DEBUG_CSE - const Int n = mainDiag.Height(); - const auto& dcCtrl = ctrl.dcCtrl; -+ if( ctrl.subset.indexSubset || ctrl.subset.rangeSubset ) -+ LogicError -+ ("DivideAndConquer should not have been called directly for subset " -+ "computation"); - - DCInfo info; - auto& secularInfo = info.secularInfo; -- - if( n <= Max(dcCtrl.cutoff,3) ) - { - auto ctrlMod( ctrl ); -@@ -1230,11 +1233,15 @@ - bool topLevel=true ) - { - EL_DEBUG_CSE -- const Grid& grid = Q.Grid(); - const Int n = mainDiag.Height(); - const auto& dcCtrl = ctrl.dcCtrl; -- DCInfo info; -+ const Grid& grid = Q.Grid(); -+ if( ctrl.subset.indexSubset || ctrl.subset.rangeSubset ) -+ LogicError -+ ("DivideAndConquer should not have been called directly for subset " -+ "computation"); - -+ DCInfo info; - if( n <= Max(dcCtrl.cutoff,3) ) - { - // Run the problem redundantly locally ---- Elemental-0.87.7/cmake/configure_files/ElementalConfig.cmake.in 2017-02-07 02:23:30.000000000 +0100 -+++ Elemental-0.87.7_ok/cmake/configure_files/ElementalConfig.cmake.in 2017-10-06 14:31:00.754510000 +0200 -@@ -13,6 +13,6 @@ - set(Elemental_DEFINITIONS "@Qt5Widgets_DEFINITIONS@") - - # Our library dependencies (contains definitions for IMPORTED targets) --include("@CMAKE_INSTALL_PREFIX@/CMake/ElementalTargets.cmake") -+include("@CMAKE_INSTALL_PREFIX@/CMake/elemental/ElementalTargets.cmake") - - set(Elemental_LIBRARIES El) diff --git a/Golden_Repo/e/Emacs/Emacs-27.1-GCCcore-10.3.0.eb b/Golden_Repo/e/Emacs/Emacs-27.1-GCCcore-10.3.0.eb deleted file mode 100644 index 2d370a581345fe39258b06c28756a881a9ca04a1..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Emacs/Emacs-27.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Emacs' -version = '27.1' - -homepage = 'http://www.gnu.org/software/emacs/' -description = """GNU Emacs is an extensible, customizable text editor--and more. 
- At its core is an interpreter for Emacs Lisp, a dialect of the Lisp programming - language with extensions to support text editing.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('ncurses', '6.2'), - ('GTK+', '3.24.17'), - ('X11', '20200222'), - ('LibTIFF', '4.1.0'), -] - -configopts = '--x-includes=$EBROOTX11/include --x-libraries=$EBROOTX11/lib --with-gif=no --with-gnutls=no ' -configopts += '--with-x-toolkit=gtk3 --with-modules' - -sanity_check_paths = { - 'files': ["bin/emacs", "bin/emacs-%(version)s", "bin/emacsclient", "bin/etags"], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/e/Emacs/Emacs-27.1-GCCcore-9.3.0.eb b/Golden_Repo/e/Emacs/Emacs-27.1-GCCcore-9.3.0.eb deleted file mode 100644 index b7e41c7758f46000c181575fe5819fea19b1cb06..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Emacs/Emacs-27.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Emacs' -version = '27.1' - -homepage = 'http://www.gnu.org/software/emacs/' -description = """GNU Emacs is an extensible, customizable text editor--and more. - At its core is an interpreter for Emacs Lisp, a dialect of the Lisp programming - language with extensions to support text editing.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('ncurses', '6.2'), - ('GTK+', '3.24.17'), - ('X11', '20200222'), - ('LibTIFF', '4.1.0'), -] - -configopts = '--x-includes=$EBROOTX11/include --x-libraries=$EBROOTX11/lib --with-gif=no --with-gnutls=no ' -configopts += '--with-x-toolkit=gtk3 --with-modules' - -sanity_check_paths = { - 'files': ["bin/emacs", "bin/emacs-%(version)s", "bin/emacsclient", "bin/etags"], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/e/Embree/Embree-3.12.2-GCC-10.3.0.eb b/Golden_Repo/e/Embree/Embree-3.12.2-GCC-10.3.0.eb deleted file mode 100644 index 30cd5239637db0d286b1e6ed21a4476e16173f9d..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Embree/Embree-3.12.2-GCC-10.3.0.eb +++ /dev/null @@ -1,60 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Embree' -version = '3.12.2' - -homepage = 'http://www.ospray.org/' -description = """ -Embree is a collection of high-performance ray tracing kernels, developed at Intel. The target user of Embree are -graphics application engineers that want to improve the performance of their application by leveraging the optimized ray -tracing kernels of Embree. The kernels are optimized for photo-realistic rendering on the latest Intel processors with -support for SSE, AVX, AVX2, and AVX512. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'optarch': True, 'pic': True} - -source_urls = ['https://github.com/embree/embree/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('pkg-config', '0.29.2'), - ('ispc', '1.12.0', '', SYSTEM), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('ImageMagick', '7.0.10-25'), - ('OpenEXR', '2.5.2'), -] - -separate_build_dir = True - -configopts = '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DEMBREE_ISPC_SUPPORT=ON ' -configopts += '-DEMBREE_TASKING_SYSTEM=INTERNAL ' -# Select highest supported ISA (SSE2, SSE4.2, AVX, AVX2, AVX512KNL, AVX512SKX, or NONE) -configopts += '-DEMBREE_MAX_ISA=AVX2 ' -configopts += '-DEMBREE_GEOMETRY_HAIR:BOOL=ON ' -configopts += '-DEMBREE_GEOMETRY_LINES:BOOL=OFF ' -configopts += '-DEMBREE_GEOMETRY_QUADS:BOOL=OFF ' -configopts += '-DEMBREE_GEOMETRY_SUBDIV:BOOL=OFF ' -configopts += '-DEMBREE_TUTORIALS=OFF ' - -sanity_check_paths = { - 'dirs': ['include/embree3'], - 'files': ['lib64/libembree3.so'] -} - -modextrapaths = { - 'CMAKE_MODULE_PATH': 'lib64/cmake/embree-%(version)s/' -} - -moduleclass = 'vis' diff --git a/Golden_Repo/e/Embree/Embree-3.8.0-GCC-9.3.0.eb b/Golden_Repo/e/Embree/Embree-3.8.0-GCC-9.3.0.eb deleted file mode 100644 index 01990978119d3d01ca9b6a9b03e24f11152ba98f..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Embree/Embree-3.8.0-GCC-9.3.0.eb +++ /dev/null @@ -1,58 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Embree' -version = '3.8.0' - -homepage = 'http://www.ospray.org/' -description = """ -Embree is a collection of high-performance ray tracing kernels, developed at Intel. The target user of Embree are -graphics application engineers that want to improve the performance of their application by leveraging the optimized ray -tracing kernels of Embree. The kernels are optimized for photo-realistic rendering on the latest Intel processors with -support for SSE, AVX, AVX2, and AVX512. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'optarch': True, 'pic': True} - -source_urls = ['https://github.com/embree/embree/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('ispc', '1.12.0', '', SYSTEM), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('ImageMagick', '7.0.10-25'), - ('OpenEXR', '2.5.2'), - ('tbb', '2020.3'), -] - -separate_build_dir = True - -configopts = '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DEMBREE_ISPC_SUPPORT=ON ' -configopts += '-DEMBREE_TASKING_SYSTEM=TBB ' -configopts += '-DEMBREE_TBB_ROOT=$EBROOTTBB ' -# Select highest supported ISA (SSE2, SSE4.2, AVX, AVX2, AVX512KNL, AVX512SKX, or NONE) -configopts += '-DEMBREE_MAX_ISA=AVX2 ' -configopts += '-DEMBREE_TUTORIALS=OFF ' - -sanity_check_paths = { - 'dirs': ['include/embree3'], - 'files': ['lib64/libembree3.so'] -} - -modextrapaths = { - 'CMAKE_MODULE_PATH': 'lib64/cmake/embree-%(version)s/' -} - -moduleclass = 'vis' diff --git a/Golden_Repo/e/Exiv2/Exiv2-0.27.3-GCCcore-10.3.0.eb b/Golden_Repo/e/Exiv2/Exiv2-0.27.3-GCCcore-10.3.0.eb deleted file mode 100644 index e31567f39732f5f318c32a8cd217e68f22be5c0a..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Exiv2/Exiv2-0.27.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Exiv2' -version = '0.27.3' - -homepage = 'http://www.exiv2.org' -description = """ - Exiv2 is a C++ library and a command line utility to manage image metadata. It provides fast and easy read and write - access to the Exif, IPTC and XMP metadata of digital images in various formats. Exiv2 is available as free software and - with a commercial license, and is used in many projects. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://www.exiv2.org/builds'] -sources = ['%(namelower)s-%(version)s-Source.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('expat', '2.2.9'), -] - -sanity_check_paths = { - 'files': ['bin/exiv2', 'lib/libexiv2.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/e/Exiv2/Exiv2-0.27.3-GCCcore-9.3.0.eb b/Golden_Repo/e/Exiv2/Exiv2-0.27.3-GCCcore-9.3.0.eb deleted file mode 100644 index 52ebf5497a41283bd8682d1f1f39d0e7dede68e4..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Exiv2/Exiv2-0.27.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Exiv2' -version = '0.27.3' - -homepage = 'http://www.exiv2.org' -description = """ - Exiv2 is a C++ library and a command line utility to manage image metadata. It provides fast and easy read and write - access to the Exif, IPTC and XMP metadata of digital images in various formats. Exiv2 is available as free software and - with a commercial license, and is used in many projects. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://www.exiv2.org/builds'] -sources = ['%(namelower)s-%(version)s-Source.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('expat', '2.2.9'), -] - -sanity_check_paths = { - 'files': ['bin/exiv2', 'lib/libexiv2.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/e/Extrae/Extrae-3.8.3-gompi-2020.eb b/Golden_Repo/e/Extrae/Extrae-3.8.3-gompi-2020.eb deleted file mode 100644 index 1ff03ae43ad95f2823e0bb77bd03645d8f5678c2..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Extrae/Extrae-3.8.3-gompi-2020.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013-2016 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = "Extrae" -version = "3.8.3" - -homepage = 'http://www.bsc.es/computer-sciences/performance-tools' -description = """Extrae is the core instrumentation package developed by the Performance Tools - group at BSC. Extrae is capable of instrumenting applications based on MPI, OpenMP, pthreads, - CUDA1, OpenCL1, and StarSs1 using different instrumentation approaches. The information gathered - by Extrae typically includes timestamped events of runtime calls, performance counters and source - code references. Besides, Extrae provides its own API to allow the user to manually instrument his - or her application. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {"usempi": True} - -source_urls = ['https://ftp.tools.bsc.es/extrae/'] -sources = ['extrae-%s-src.tar.bz2' % version] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('libunwind', '1.4.0'), - ('libxml2', '2.9.10'), - ('PAPI', '6.0.0'), - ('Boost', '1.74.0'), - ('libdwarf', '20201020'), - ('zlib', '1.2.11'), - ('CUDA', '11.0', '', SYSTEM) -] - -preconfigopts = 'autoreconf -vif && ' - -# Without Dyninst and without SIONlib -configopts = '--enable-posix-clock --with-libgomp-version=4.9 --enable-openmp --enable-sampling ' -configopts += '--with-binutils=$EBROOTBINUTILS --with-boost=$EBROOTBOOST --with-dwarf=$EBROOTLIBDWARF ' -configopts += '--with-mpi=$EBROOTOPENMPI --with-papi=$EBROOTPAPI --with-unwind=$EBROOTLIBUNWIND --with-libz=$EBROOTZLIB' -configopts += ' --without-dyninst ' -configopts += '--with-cuda=$EBROOTCUDA --with-cupti=$EBROOTCUDA/extras/CUPTI' - -moduleclass = 'perf' diff --git a/Golden_Repo/e/Extrae/Extrae-3.8.3-gpsmpi-2020.eb b/Golden_Repo/e/Extrae/Extrae-3.8.3-gpsmpi-2020.eb deleted file mode 100644 index 0acd69b01b66a4b24a05616c3161a65dd5fd5273..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Extrae/Extrae-3.8.3-gpsmpi-2020.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013-2016 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = "Extrae" -version = "3.8.3" - -homepage = 'http://www.bsc.es/computer-sciences/performance-tools' -description = """Extrae is the core instrumentation package developed by the Performance Tools - group at BSC. Extrae is capable of instrumenting applications based on MPI, OpenMP, pthreads, - CUDA1, OpenCL1, and StarSs1 using different instrumentation approaches. The information gathered - by Extrae typically includes timestamped events of runtime calls, performance counters and source - code references. Besides, Extrae provides its own API to allow the user to manually instrument his - or her application. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {"usempi": True} - -source_urls = ['https://ftp.tools.bsc.es/extrae/'] -sources = ['extrae-%s-src.tar.bz2' % version] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('libunwind', '1.4.0'), - ('libxml2', '2.9.10'), - ('PAPI', '6.0.0'), - ('Boost', '1.74.0'), - ('libdwarf', '20201020'), - ('zlib', '1.2.11'), - ('CUDA', '11.0', '', SYSTEM) -] - -preconfigopts = 'autoreconf -vif && ' - -# Without Dyninst and without SIONlib -configopts = '--enable-posix-clock --with-libgomp-version=4.9 --enable-openmp --enable-sampling ' -configopts += '--with-binutils=$EBROOTBINUTILS --with-boost=$EBROOTBOOST --with-dwarf=$EBROOTLIBDWARF ' -configopts += '--with-mpi=$EBROOTPSMPI --with-papi=$EBROOTPAPI --with-unwind=$EBROOTLIBUNWIND --with-libz=$EBROOTZLIB ' -configopts += '--without-dyninst ' -configopts += '--with-cuda=$EBROOTCUDA --with-cupti=$EBROOTCUDA/extras/CUPTI' - -moduleclass = 'perf' diff --git a/Golden_Repo/e/Extrae/Extrae-3.8.3-iimpi-2020.eb b/Golden_Repo/e/Extrae/Extrae-3.8.3-iimpi-2020.eb deleted file mode 100644 index b5e3db276c2958eb4eec24d94b865a610b46b9d3..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Extrae/Extrae-3.8.3-iimpi-2020.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013-2016 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = "Extrae" -version = "3.8.3" - -homepage = 'http://www.bsc.es/computer-sciences/performance-tools' -description = """Extrae is the core instrumentation package developed by the Performance Tools - group at BSC. Extrae is capable of instrumenting applications based on MPI, OpenMP, pthreads, - CUDA1, OpenCL1, and StarSs1 using different instrumentation approaches. The information gathered - by Extrae typically includes timestamped events of runtime calls, performance counters and source - code references. Besides, Extrae provides its own API to allow the user to manually instrument his - or her application. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {"usempi": True} - -source_urls = ['https://ftp.tools.bsc.es/extrae/'] -sources = ['extrae-%s-src.tar.bz2' % version] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('libunwind', '1.4.0'), - ('libxml2', '2.9.10'), - ('PAPI', '6.0.0'), - ('Boost', '1.73.0'), - ('libdwarf', '20201020'), - ('zlib', '1.2.11'), - ('CUDA', '11.0', '', SYSTEM) -] - -preconfigopts = 'autoreconf -vif && ' - -# Without Dyninst and without SIONlib -configopts = '--enable-posix-clock --with-libgomp-version=4.9 --enable-openmp --enable-sampling ' -configopts += '--with-binutils=$EBROOTBINUTILS --with-boost=$EBROOTBOOST --with-dwarf=$EBROOTLIBDWARF ' -configopts += '--with-mpi=$EBROOTIMPI/intel64 --with-papi=$EBROOTPAPI --with-unwind=$EBROOTLIBUNWIND ' -configopts += '--with-libz=$EBROOTZLIB --without-dyninst ' -configopts += '--with-cuda=$EBROOTCUDA --with-cupti=$EBROOTCUDA/extras/CUPTI' - -moduleclass = 'perf' diff --git a/Golden_Repo/e/Extrae/Extrae-3.8.3-iompi-2020.eb b/Golden_Repo/e/Extrae/Extrae-3.8.3-iompi-2020.eb deleted file mode 100644 index 309d6799a465f596c0f11ad5073a1e61d3024b08..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Extrae/Extrae-3.8.3-iompi-2020.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013-2016 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = "Extrae" -version = "3.8.3" - -homepage = 'http://www.bsc.es/computer-sciences/performance-tools' -description = """Extrae is the core instrumentation package developed by the Performance Tools - group at BSC. Extrae is capable of instrumenting applications based on MPI, OpenMP, pthreads, - CUDA1, OpenCL1, and StarSs1 using different instrumentation approaches. The information gathered - by Extrae typically includes timestamped events of runtime calls, performance counters and source - code references. Besides, Extrae provides its own API to allow the user to manually instrument his - or her application. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {"usempi": True} - -source_urls = ['https://ftp.tools.bsc.es/extrae/'] -sources = ['extrae-%s-src.tar.bz2' % version] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('libunwind', '1.4.0'), - ('libxml2', '2.9.10'), - ('PAPI', '6.0.0'), - ('Boost', '1.73.0'), - ('libdwarf', '20201020'), - ('zlib', '1.2.11'), - ('CUDA', '11.0', '', SYSTEM) -] - -preconfigopts = 'autoreconf -vif && ' - -# Without Dyninst and without SIONlib -configopts = '--enable-posix-clock --with-libgomp-version=4.9 --enable-openmp --enable-sampling ' -configopts += '--with-binutils=$EBROOTBINUTILS --with-boost=$EBROOTBOOST --with-dwarf=$EBROOTLIBDWARF ' -configopts += '--with-mpi=$EBROOTOPENMPI --with-papi=$EBROOTPAPI --with-unwind=$EBROOTLIBUNWIND --with-libz=$EBROOTZLIB' -configopts += ' --without-dyninst ' -configopts += '--with-cuda=$EBROOTCUDA --with-cupti=$EBROOTCUDA/extras/CUPTI' - -moduleclass = 'perf' diff --git a/Golden_Repo/e/Extrae/Extrae-3.8.3-ipsmpi-2020.eb b/Golden_Repo/e/Extrae/Extrae-3.8.3-ipsmpi-2020.eb deleted file mode 100644 index 611bbc750707c28d68fcfcc9d68147ee92800136..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/Extrae/Extrae-3.8.3-ipsmpi-2020.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013-2016 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = "Extrae" -version = "3.8.3" - -homepage = 'http://www.bsc.es/computer-sciences/performance-tools' -description = """Extrae is the core instrumentation package developed by the Performance Tools - group at BSC. Extrae is capable of instrumenting applications based on MPI, OpenMP, pthreads, - CUDA1, OpenCL1, and StarSs1 using different instrumentation approaches. The information gathered - by Extrae typically includes timestamped events of runtime calls, performance counters and source - code references. Besides, Extrae provides its own API to allow the user to manually instrument his - or her application. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {"usempi": True} - -source_urls = ['https://ftp.tools.bsc.es/extrae/'] -sources = ['extrae-%s-src.tar.bz2' % version] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('libunwind', '1.4.0'), - ('libxml2', '2.9.10'), - ('PAPI', '6.0.0'), - ('Boost', '1.73.0'), - ('libdwarf', '20201020'), - ('zlib', '1.2.11'), - ('CUDA', '11.0', '', SYSTEM) -] - -preconfigopts = 'autoreconf -vif && ' - -# Without Dyninst and without SIONlib -configopts = '--enable-posix-clock --with-libgomp-version=4.9 --enable-openmp --enable-sampling ' -configopts += '--with-binutils=$EBROOTBINUTILS --with-boost=$EBROOTBOOST --with-dwarf=$EBROOTLIBDWARF ' -configopts += '--with-mpi=$EBROOTPSMPI --with-papi=$EBROOTPAPI --with-unwind=$EBROOTLIBUNWIND --with-libz=$EBROOTZLIB ' -configopts += '--without-dyninst ' -configopts += '--with-cuda=$EBROOTCUDA --with-cupti=$EBROOTCUDA/extras/CUPTI' - -moduleclass = 'perf' diff --git a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-gpsmpi-2020.eb b/Golden_Repo/e/ecCodes/ecCodes-2.18.0-gpsmpi-2020.eb deleted file mode 100644 index 9b4e019c132f559ef39f9c15a2ec2924fb201375..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-gpsmpi-2020.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ecCodes' -version = '2.18.0' - -homepage = 'https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home' -description = """ecCodes is a package developed by ECMWF which provides an application programming interface and - a set of tools for decoding and encoding messages in the following formats: WMO FM-92 GRIB edition 1 and edition 2, - WMO FM-94 BUFR edition 3 and edition 4, WMO GTS abbreviated header (only decoding).""" - -site_contacts = 'a.ghasemi@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['https://confluence.ecmwf.int/download/attachments/45757960/'] -sources = ['eccodes-%(version)s-Source.tar.gz'] - -builddependencies = [('CMake', '3.18.0')] -dependencies = [ - ('netCDF', '4.7.4'), - ('JasPer', '2.0.19'), -] - -separate_build_dir = True - -configopts = "-DENABLE_NETCDF=ON -DENABLE_JPG=ON -DENABLE_PYTHON=OFF" - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['bufr_copy', 'bufr_dump', 'bufr_filter', 'bufr_ls', - 'codes_count', 'codes_info', 'codes_split_file', - 'grib_copy', 'grib_dump', 'grib_filter', 'grib_ls']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-ipsmpi-2020.eb b/Golden_Repo/e/ecCodes/ecCodes-2.18.0-ipsmpi-2020.eb deleted file mode 100644 index 6b4dd839e0acc447b84d30ddb11efa9b077c87a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-ipsmpi-2020.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ecCodes' -version = '2.18.0' - -homepage = 'https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home' -description = """ecCodes is a package developed by ECMWF which provides an application programming interface and - a set of tools for decoding and encoding messages in the following formats: WMO FM-92 GRIB edition 1 and edition 2, - WMO FM-94 BUFR edition 3 and edition 4, WMO GTS abbreviated header (only decoding).""" - -site_contacts = 'a.ghasemi@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['https://confluence.ecmwf.int/download/attachments/45757960/'] -sources = ['eccodes-%(version)s-Source.tar.gz'] - 
-builddependencies = [('CMake', '3.18.0')] -dependencies = [ - ('netCDF', '4.7.4'), - ('JasPer', '2.0.19'), -] - -separate_build_dir = True - -configopts = "-DENABLE_NETCDF=ON -DENABLE_JPG=ON -DENABLE_PYTHON=OFF" - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['bufr_copy', 'bufr_dump', 'bufr_filter', 'bufr_ls', - 'codes_count', 'codes_info', 'codes_split_file', - 'grib_copy', 'grib_dump', 'grib_filter', 'grib_ls']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-npsmpic-2020.1.eb b/Golden_Repo/e/ecCodes/ecCodes-2.18.0-npsmpic-2020.1.eb deleted file mode 100644 index 064fa0fdc66799ac15b0c0b7326fd1799d30eec5..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-npsmpic-2020.1.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ecCodes' -version = '2.18.0' - -homepage = 'https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home' -description = """ecCodes is a package developed by ECMWF which provides an application programming interface and - a set of tools for decoding and encoding messages in the following formats: WMO FM-92 GRIB edition 1 and edition 2, - WMO FM-94 BUFR edition 3 and edition 4, WMO GTS abbreviated header (only decoding).""" - -site_contacts = 'a.ghasemi@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} - -source_urls = ['https://confluence.ecmwf.int/download/attachments/45757960/'] -sources = ['eccodes-%(version)s-Source.tar.gz'] - -builddependencies = [('CMake', '3.18.0')] -dependencies = [ - ('netCDF', '4.7.4'), - ('JasPer', '2.0.19'), -] - -separate_build_dir = True - -configopts = "-DENABLE_NETCDF=ON -DENABLE_JPG=ON -DENABLE_PYTHON=OFF" - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['bufr_copy', 'bufr_dump', 'bufr_filter', 'bufr_ls', - 'codes_count', 'codes_info', 'codes_split_file', - 'grib_copy', 'grib_dump', 'grib_filter', 'grib_ls']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-npsmpic-2020.eb b/Golden_Repo/e/ecCodes/ecCodes-2.18.0-npsmpic-2020.eb deleted file mode 100644 index a2a69fc74411acf50157efeb7ae4261bcab34f6b..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/ecCodes/ecCodes-2.18.0-npsmpic-2020.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ecCodes' -version = '2.18.0' - -homepage = 'https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home' -description = """ecCodes is a package developed by ECMWF which provides an application programming interface and - a set of tools for decoding and encoding messages in the following formats: WMO FM-92 GRIB edition 1 and edition 2, - WMO FM-94 BUFR edition 3 and edition 4, WMO GTS abbreviated header (only decoding).""" - -site_contacts = 'a.ghasemi@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} - -source_urls = ['https://confluence.ecmwf.int/download/attachments/45757960/'] -sources = ['eccodes-%(version)s-Source.tar.gz'] - -builddependencies = [('CMake', '3.18.0')] -dependencies = [ - ('netCDF', '4.7.4'), - ('JasPer', '2.0.19'), -] - -separate_build_dir = True - -configopts = "-DENABLE_NETCDF=ON -DENABLE_JPG=ON -DENABLE_PYTHON=OFF" - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['bufr_copy', 'bufr_dump', 'bufr_filter', 'bufr_ls', - 'codes_count', 'codes_info', 'codes_split_file', - 'grib_copy', 'grib_dump', 'grib_filter', 'grib_ls']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/e/eudev/eudev-3.2.9-GCCcore-10.3.0.eb 
b/Golden_Repo/e/eudev/eudev-3.2.9-GCCcore-10.3.0.eb deleted file mode 100644 index c4b4950bbf759c1b71ed287388c49ce46f0ad1a3..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/eudev/eudev-3.2.9-GCCcore-10.3.0.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'eudev' -version = '3.2.9' - -homepage = 'https://wiki.gentoo.org/wiki/Project:Eudev' - -description = """ - eudev is a fork of systemd-udev with the goal of obtaining better - compatibility with existing software such as OpenRC and Upstart, - older kernels, various toolchains and anything else required by - users and various distributions. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://dev.gentoo.org/~blueness/%(name)s/'] -sources = [SOURCE_TAR_GZ] -patches = ['%(name)s-%(version)s_python3.patch'] -checksums = [ - '89618619084a19e1451d373c43f141b469c9fd09767973d73dd268b92074d4fc', - '846b1e72e12853c4146d3a4e312301001bbfb13110ce76de2afdf860f4d085a8', -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('gperf', '3.1'), - ('Python', '3.8.5'), -] - -osdependencies = [('kernel-headers', 'linux-libc-dev')] - -configopts = '--disable-blkid --disable-selinux --disable-manpages ' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['bin/udevadm', 'include/libudev.h', 'include/udev.h', - 'lib/libudev.so.1'], - 'dirs': [], -} - -moduleclass = 'system' diff --git a/Golden_Repo/e/eudev/eudev-3.2.9-GCCcore-9.3.0.eb b/Golden_Repo/e/eudev/eudev-3.2.9-GCCcore-9.3.0.eb deleted file mode 100644 index 1988f701ec1ef5f03d29a65834e3096f10337551..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/eudev/eudev-3.2.9-GCCcore-9.3.0.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'eudev' -version = '3.2.9' - -homepage = 'https://wiki.gentoo.org/wiki/Project:Eudev' - -description = """ - eudev is a fork of systemd-udev with the goal of obtaining better - compatibility with existing software such as OpenRC and Upstart, - older kernels, various toolchains and anything else required by - users and various distributions. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://dev.gentoo.org/~blueness/%(name)s/'] -sources = [SOURCE_TAR_GZ] -patches = ['%(name)s-%(version)s_python3.patch'] -checksums = [ - '89618619084a19e1451d373c43f141b469c9fd09767973d73dd268b92074d4fc', - '846b1e72e12853c4146d3a4e312301001bbfb13110ce76de2afdf860f4d085a8', -] - -builddependencies = [ - ('binutils', '2.34'), - ('gperf', '3.1'), - ('Python', '3.8.5'), -] - -osdependencies = [('kernel-headers', 'linux-libc-dev')] - -configopts = '--disable-blkid --disable-selinux --disable-manpages ' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['bin/udevadm', 'include/libudev.h', 'include/udev.h', - 'lib/libudev.so.1'], - 'dirs': [], -} - -moduleclass = 'system' diff --git a/Golden_Repo/e/eudev/eudev-3.2.9_python3.patch b/Golden_Repo/e/eudev/eudev-3.2.9_python3.patch deleted file mode 100644 index 6730aacf06124cfaf282723c931418dce9665ad0..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/eudev/eudev-3.2.9_python3.patch +++ /dev/null @@ -1,9 +0,0 @@ -diff -ruN eudev-3.2.9.orig/test/rule-syntax-check.py eudev-3.2.9/test/rule-syntax-check.py ---- eudev-3.2.9.orig/test/rule-syntax-check.py 2016-11-17 22:14:19.000000000 +0100 -+++ eudev-3.2.9/test/rule-syntax-check.py 2020-11-06 15:22:12.238868994 +0100 -@@ -1,4 +1,4 @@ --#!/usr/bin/python -+#!/usr/bin/env python3 - # Simple udev rules syntax checker - # - # (C) 2010 Canonical Ltd. diff --git a/Golden_Repo/e/expat/expat-2.2.9-GCCcore-10.3.0.eb b/Golden_Repo/e/expat/expat-2.2.9-GCCcore-10.3.0.eb deleted file mode 100644 index b97ac4698badb8adcb8dc4308ed8ed6295cb2133..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/expat/expat-2.2.9-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'expat' -version = '2.2.9' - -homepage = 'https://libexpat.github.io' - -description = """ -Expat is an XML parser library written in C. It is a stream-oriented parser -in which an application registers handlers for things the parser might find -in the XML document (like start tags) -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/libexpat/libexpat/releases/download/R_%s/' % - version.replace('.', '_')] -sources = [SOURCE_TAR_BZ2] -checksums = ['f1063084dc4302a427dabcca499c8312b3a32a29b7d2506653ecc8f950a9a237'] - -builddependencies = [('binutils', '2.36.1')] - -# Since expat 2.2.6, docbook2X is needed to produce manpage of xmlwf. -# Docbook2X needs XML-Parser and XML-Parser needs expat. -# -> circular dependency. "--without-docbook" breaks this circle. -configopts = ['--without-docbook'] - -sanity_check_paths = { - 'files': ['include/expat.h', 'lib/libexpat.a', 'lib/libexpat.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/e/expat/expat-2.2.9-GCCcore-9.3.0.eb b/Golden_Repo/e/expat/expat-2.2.9-GCCcore-9.3.0.eb deleted file mode 100644 index f8cb4fd6d6c7788ac26e8eec7feee9e25a9b01bb..0000000000000000000000000000000000000000 --- a/Golden_Repo/e/expat/expat-2.2.9-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'expat' -version = '2.2.9' - -homepage = 'http://expat.sourceforge.net/' -description = """Expat is an XML parser library written in C. 
It is a - stream-oriented parser in which an application registers - handlers for things the parser might find in the XML document - (like start tags)""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_BZ2] -source_urls = [SOURCEFORGE_SOURCE] - -builddependencies = [ - ('binutils', '2.34'), -] - -moduleclass = 'tools' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-GCCcore-10.3.0-nompi.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-GCCcore-10.3.0-nompi.eb deleted file mode 100644 index a0188feb72bc3a734446ec9605968f5495eb0c13..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-GCCcore-10.3.0-nompi.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'FFTW' -version = '3.3.8' -versionsuffix = '-nompi' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -# We hide it here since this should be used just for Jupyter and the MPI version should be preferred for normal cases -hidden = True - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because Intel compilers do not support FMA4 instructions -use_fma4 = True - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-GCCcore-9.3.0-nompi.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-GCCcore-9.3.0-nompi.eb deleted file mode 100644 index d39a648a5424ec10a049baf1336f24054456422d..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-GCCcore-9.3.0-nompi.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'FFTW' -version = '3.3.8' -versionsuffix = '-nompi' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -builddependencies = [ - ('binutils', '2.34'), -] - -# We hide it here since this should be used just for Jupyter and the MPI version should be preferred for normal cases -hidden = True - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because Intel compilers do not support FMA4 instructions -use_fma4 = True - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': 
'%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-gompi-2020.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-gompi-2020.eb deleted file mode 100644 index 5348a7fe8ffe54ecc9c551c3cae8dfb5c035e94f..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-gompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because Intel compilers do not support FMA4 instructions -use_fma4 = True - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-gompi-2021.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-gompi-2021.eb deleted file mode 100644 index 17376217eacc32dc3161faa34c8f70d6408ee9bd..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-gompi-2021.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because Intel compilers do not support FMA4 instructions -use_fma4 = True - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb deleted file mode 100644 index 09e4d95027781b789ffc44f2df97573f68d01d20..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts 
= {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because Intel compilers do not support FMA4 instructions -use_fma4 = True - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-gpsmpi-2021.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-gpsmpi-2021.eb deleted file mode 100644 index b018bc04c1779601725e295f5b6853629df9018f..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-gpsmpi-2021.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because Intel compilers do not support FMA4 instructions -use_fma4 = True - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-iimpi-2020.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-iimpi-2020.eb deleted file mode 100644 index 16668a684c08f769962a6478232e9ac832043456..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-iimpi-2020.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] - -# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582 -patches = ['fftw_no-gcc-intel_2020.patch'] - -checksums = [ - '6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303', - 'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6' -] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = False - -# Intel compilers do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', 
-} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-iimpi-2021.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-iimpi-2021.eb deleted file mode 100644 index 44d0d51fb8d15c41003a64051f1014b1be2f4d7f..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-iimpi-2021.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] - -# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582 -patches = ['fftw_no-gcc-intel_2021.patch'] - -checksums = [ - '6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303', - 'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6' -] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = False - -# Intel compilers do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-iompi-2020.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-iompi-2020.eb deleted file mode 100644 index afd7ab7024c00636f61c07fa743eeb09b894bdfc..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-iompi-2020.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] - -# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582 -patches = ['fftw_no-gcc-intel_2020.patch'] - -checksums = [ - '6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303', - 'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6' -] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = False - -# Intel compilers do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-iompi-2021.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-iompi-2021.eb deleted file mode 100644 index dc2ff6c6c6b6a3c6463035d2b032bbce03a53a42..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-iompi-2021.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of 
both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] - -# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582 -patches = ['fftw_no-gcc-intel_2021.patch'] - -checksums = [ - '6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303', - 'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6' -] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = False - -# Intel compilers do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2020-mt.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2020-mt.eb deleted file mode 100644 index ec54be2c15a4b1e054dc17e1d5c53fc2b79d4150..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] - -# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582 -patches = ['fftw_no-gcc-intel_2020.patch'] - -checksums = [ - '6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303', - 'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6' -] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = False - -# Intel compilers do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2020.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2020.eb deleted file mode 100644 index 1ccf3952789b9807eaee57cd20c5bb3f10f04f32..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2020.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] - -# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582 -patches = ['fftw_no-gcc-intel_2020.patch'] - -checksums = [ - '6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303', - 'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6' -] - -# no quad precision, 
requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = False - -# Intel compilers do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2021.eb b/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2021.eb deleted file mode 100644 index 964f0b505fb24ba3ef061969ee019d57b98a2ccf..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/FFTW-3.3.8-ipsmpi-2021.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] - -# See https://github.com/FFTW/fftw3/commit/10e2040af822a08ed49d2f6a1db45a7a3ad50582 -patches = ['fftw_no-gcc-intel_2021.patch'] - -checksums = [ - '6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303', - 'f226cc6dbdc9d11d4340567ef3227d78284c4dc44b8e63c3901a079aa9527da6' -] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = False - -# Intel compilers do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/f/FFTW/fftw_no-gcc-intel_2020.patch b/Golden_Repo/f/FFTW/fftw_no-gcc-intel_2020.patch deleted file mode 100644 index bba9be08897b55924c174905a0a2f095abf184b2..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/fftw_no-gcc-intel_2020.patch +++ /dev/null @@ -1,55 +0,0 @@ -diff -ruN fftw-3.3.8.orig/configure fftw-3.3.8/configure ---- fftw-3.3.8.orig/configure 2018-05-24 14:03:34.000000000 +0200 -+++ fftw-3.3.8/configure 2020-10-19 10:43:04.760401267 +0200 -@@ -14858,51 +14858,6 @@ - - - case "${ax_cv_c_compiler_vendor}" in -- intel) # Stop icc from defining __GNUC__, except on MacOS where this fails -- case "${host_os}" in -- *darwin*) ;; # icc -no-gcc fails to compile some system headers -- *) -- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -no-gcc" >&5 --$as_echo_n "checking whether C compiler accepts -no-gcc... " >&6; } --if ${ax_cv_c_flags__no_gcc+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- -- ax_save_FLAGS=$CFLAGS -- CFLAGS="-no-gcc" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext --/* end confdefs.h. 
*/ -- --int --main () --{ -- -- ; -- return 0; --} --_ACEOF --if ac_fn_c_try_compile "$LINENO"; then : -- ax_cv_c_flags__no_gcc=yes --else -- ax_cv_c_flags__no_gcc=no --fi --rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -- CFLAGS=$ax_save_FLAGS --fi -- --eval ax_check_compiler_flags=$ax_cv_c_flags__no_gcc --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_check_compiler_flags" >&5 --$as_echo "$ax_check_compiler_flags" >&6; } --if test "x$ax_check_compiler_flags" = xyes; then -- CC="$CC -no-gcc" --else -- : --fi -- -- ;; -- esac -- ;; -- - hp) # must (sometimes) manually increase cpp limits to handle fftw3.h - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -Wp,-H128000" >&5 - $as_echo_n "checking whether C compiler accepts -Wp,-H128000... " >&6; } diff --git a/Golden_Repo/f/FFTW/fftw_no-gcc-intel_2021.patch b/Golden_Repo/f/FFTW/fftw_no-gcc-intel_2021.patch deleted file mode 100644 index bba9be08897b55924c174905a0a2f095abf184b2..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFTW/fftw_no-gcc-intel_2021.patch +++ /dev/null @@ -1,55 +0,0 @@ -diff -ruN fftw-3.3.8.orig/configure fftw-3.3.8/configure ---- fftw-3.3.8.orig/configure 2018-05-24 14:03:34.000000000 +0200 -+++ fftw-3.3.8/configure 2020-10-19 10:43:04.760401267 +0200 -@@ -14858,51 +14858,6 @@ - - - case "${ax_cv_c_compiler_vendor}" in -- intel) # Stop icc from defining __GNUC__, except on MacOS where this fails -- case "${host_os}" in -- *darwin*) ;; # icc -no-gcc fails to compile some system headers -- *) -- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -no-gcc" >&5 --$as_echo_n "checking whether C compiler accepts -no-gcc... " >&6; } --if ${ax_cv_c_flags__no_gcc+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- -- ax_save_FLAGS=$CFLAGS -- CFLAGS="-no-gcc" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext --/* end confdefs.h. */ -- --int --main () --{ -- -- ; -- return 0; --} --_ACEOF --if ac_fn_c_try_compile "$LINENO"; then : -- ax_cv_c_flags__no_gcc=yes --else -- ax_cv_c_flags__no_gcc=no --fi --rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -- CFLAGS=$ax_save_FLAGS --fi -- --eval ax_check_compiler_flags=$ax_cv_c_flags__no_gcc --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ax_check_compiler_flags" >&5 --$as_echo "$ax_check_compiler_flags" >&6; } --if test "x$ax_check_compiler_flags" = xyes; then -- CC="$CC -no-gcc" --else -- : --fi -- -- ;; -- esac -- ;; -- - hp) # must (sometimes) manually increase cpp limits to handle fftw3.h - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -Wp,-H128000" >&5 - $as_echo_n "checking whether C compiler accepts -Wp,-H128000... 
" >&6; } diff --git a/Golden_Repo/f/FFmpeg/FFmpeg-4.3.1-GCCcore-10.3.0.eb b/Golden_Repo/f/FFmpeg/FFmpeg-4.3.1-GCCcore-10.3.0.eb deleted file mode 100644 index d372892f11e35cd06d8d5ae296ebfe6ea5c1ded3..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFmpeg/FFmpeg-4.3.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -# Built with EasyBuild version 4.4.0 on 2021-06-21_20-21-54 -easyblock = 'ConfigureMake' - -name = 'FFmpeg' -version = '4.3.1' - -homepage = 'https://www.ffmpeg.org/' -description = """A complete, cross-platform solution to record, convert and stream audio and video.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://ffmpeg.org/releases/'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['f4a4ac63946b6eee3bbdde523e298fca6019d048d6e1db0d1439a62cea65f0d9'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2') -] - -dependencies = [ - ('NASM', '2.15.03'), - ('zlib', '1.2.11'), - ('bzip2', '1.0.8'), - ('x264', '20200912'), - ('LAME', '3.100'), - ('x265', '3.4'), - ('libvpx', '1.9.0'), - ('X11', '20200222'), - ('freetype', '2.10.1'), - ('fontconfig', '2.13.92'), - ('FriBidi', '1.0.9'), -] - -configopts = '--enable-pic --enable-shared --enable-gpl --enable-version3 --enable-nonfree --cc="$CC" --cxx="$CXX" ' -configopts += '--enable-libx264 --enable-libx265 --enable-libmp3lame --enable-libfreetype --enable-fontconfig ' -configopts += '--enable-libfribidi --enable-libvpx' - -sanity_check_paths = { - 'files': ['bin/ff%s' % x for x in ['mpeg', 'probe']] + - ['lib/lib%s.%s' % (x, y) for x in ['avdevice', 'avfilter', 'avformat', 'avcodec', 'postproc', - 'swresample', 'swscale', 'avutil'] for y in [SHLIB_EXT, 'a']], - 'dirs': ['include'] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/f/FFmpeg/FFmpeg-4.3.1-GCCcore-9.3.0.eb b/Golden_Repo/f/FFmpeg/FFmpeg-4.3.1-GCCcore-9.3.0.eb deleted file mode 100644 index f6c25e15e07e48fb68d6fc9cb47836a7d7e96988..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFmpeg/FFmpeg-4.3.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'FFmpeg' -version = '4.3.1' - -homepage = 'https://www.ffmpeg.org/' -description = """A complete, cross-platform solution to record, convert and stream audio and video.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://ffmpeg.org/releases/'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['f4a4ac63946b6eee3bbdde523e298fca6019d048d6e1db0d1439a62cea65f0d9'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2') -] - -dependencies = [ - ('NASM', '2.15.03'), - ('zlib', '1.2.11'), - ('bzip2', '1.0.8'), - ('x264', '20200912'), - ('LAME', '3.100'), - ('x265', '3.4'), - ('libvpx', '1.9.0'), - ('X11', '20200222'), - ('freetype', '2.10.1'), - ('fontconfig', '2.13.92'), - ('FriBidi', '1.0.9'), -] - -configopts = '--enable-pic --enable-shared --enable-gpl --enable-version3 --enable-nonfree --cc="$CC" --cxx="$CXX" ' -configopts += '--enable-libx264 --enable-libx265 --enable-libmp3lame --enable-libfreetype --enable-fontconfig ' -configopts += '--enable-libfribidi --enable-libvpx' - -sanity_check_paths = { - 'files': ['bin/ff%s' % x for x in ['mpeg', 'probe']] + - ['lib/lib%s.%s' % (x, y) for x in ['avdevice', 'avfilter', 'avformat', 'avcodec', 'postproc', - 'swresample', 'swscale', 'avutil'] for y in [SHLIB_EXT, 'a']], - 'dirs': ['include'] -} - -moduleclass = 'vis' 
diff --git a/Golden_Repo/f/FFmpeg/FFmpeg-4.4-GCCcore-10.3.0.eb b/Golden_Repo/f/FFmpeg/FFmpeg-4.4-GCCcore-10.3.0.eb deleted file mode 100644 index 9117ead0074875e1fca120f74f029035f8138fa1..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FFmpeg/FFmpeg-4.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,71 +0,0 @@ -# Built with EasyBuild version 4.4.0 on 2021-06-21_20-21-54 -easyblock = 'ConfigureMake' - -name = 'FFmpeg' -version = '4.4' -local_ffnvcodec_version = '11.0.10.1' - -homepage = 'https://www.ffmpeg.org/' -description = """A complete, cross-platform solution to record, convert and stream audio and video.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'https://ffmpeg.org/releases/', - 'https://github.com/FFmpeg/nv-codec-headers/releases/download/n%s/' % local_ffnvcodec_version, -] -sources = [ - SOURCELOWER_TAR_BZ2, - 'nv-codec-headers-%s.tar.gz' % local_ffnvcodec_version, -] -checksums = [ - '42093549751b582cf0f338a21a3664f52e0a9fbe0d238d3c992005e493607d0e', - '97e37b85922f1167b2f0bf0bb804c3d7266cc679e78814fe820cf8912a0e1291', -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2') -] - -dependencies = [ - ('NASM', '2.15.03'), - ('zlib', '1.2.11'), - ('bzip2', '1.0.8'), - ('x264', '20200912'), - ('LAME', '3.100'), - ('x265', '3.4'), - ('libvpx', '1.9.0'), - ('X11', '20200222'), - ('freetype', '2.10.1'), - ('fontconfig', '2.13.92'), - ('FriBidi', '1.0.9'), - ('CUDA', '11.3', '', SYSTEM), -] - -preconfigopts = 'pushd %%(builddir)s/nv-codec-headers-%s/ && ' % local_ffnvcodec_version -preconfigopts += 'make install PREFIX=%(builddir)s/ffnvcodec && ' -preconfigopts += 'popd && ' -preconfigopts += 'export PKG_CONFIG_PATH=%(builddir)s/ffnvcodec/lib/pkgconfig/:$PKG_CONFIG_PATH && ' - -configopts = '--enable-pic --enable-shared --enable-gpl --enable-version3 --enable-nonfree --cc="$CC" --cxx="$CXX" ' -configopts += '--enable-libx264 --enable-libx265 --enable-libmp3lame --enable-libfreetype --enable-fontconfig ' -configopts += '--enable-libfribidi --enable-libvpx ' -configopts += '--enable-cuda-nvcc --enable-libnpp --enable-nvenc --enable-cuvid ' -configopts += '--extra-cflags=-I${EBROOTCUDA}/include --extra-ldflags=-L${EBROOTCUDA}/lib64 ' -# FFmpeg embeds ptx assembly code. Hence, '-ptx' is added to nvccflags by configure. -# This means that we cannot add more than a single architecture here. -# But new NVIDIA GPUs should be backward compatible. -configopts += '--nvccflags="-O2 -gencode=arch=compute_60,code=sm_60" ' -configopts += '--logfile=log.txt ' - -sanity_check_paths = { - 'files': ['bin/ff%s' % x for x in ['mpeg', 'probe']] + - ['lib/lib%s.%s' % (x, y) for x in ['avdevice', 'avfilter', 'avformat', 'avcodec', 'postproc', - 'swresample', 'swscale', 'avutil'] for y in [SHLIB_EXT, 'a']], - 'dirs': ['include'] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/f/FLTK/FLTK-1.3.5-GCCcore-10.3.0.eb b/Golden_Repo/f/FLTK/FLTK-1.3.5-GCCcore-10.3.0.eb deleted file mode 100644 index b883dd244c50280a9ac64f885b537981cd8f8007..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FLTK/FLTK-1.3.5-GCCcore-10.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -# -# author: Dina Mahmoud Ibrahim ( Cairo University ) -# -easyblock = 'ConfigureMake' - -name = 'FLTK' -version = '1.3.5' - -homepage = 'http://www.fltk.org' -description = """FLTK is a cross-platform C++ GUI toolkit for UNIX/Linux (X11), Microsoft Windows, - and MacOS X. 
FLTK provides modern GUI functionality without the bloat and supports 3D graphics via OpenGL - and its built-in GLUT emulation. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = ['%(namelower)s-%(version)s-source.tar.gz'] -source_urls = ['http://fltk.org/pub/%(namelower)s/%(version)s/'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), -] - -sanity_check_paths = { - 'files': ['bin/fltk-config', 'bin/fluid'], - 'dirs': ['lib'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/f/FLTK/FLTK-1.3.5-GCCcore-9.3.0.eb b/Golden_Repo/f/FLTK/FLTK-1.3.5-GCCcore-9.3.0.eb deleted file mode 100644 index e98e1fcb455d699d959fa9e00de255792479edbb..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FLTK/FLTK-1.3.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -# -# author: Dina Mahmoud Ibrahim ( Cairo University ) -# -easyblock = 'ConfigureMake' - -name = 'FLTK' -version = '1.3.5' - -homepage = 'http://www.fltk.org' -description = """FLTK is a cross-platform C++ GUI toolkit for UNIX/Linux (X11), Microsoft Windows, - and MacOS X. FLTK provides modern GUI functionality without the bloat and supports 3D graphics via OpenGL - and its built-in GLUT emulation. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = ['%(namelower)s-%(version)s-source.tar.gz'] -source_urls = ['http://fltk.org/pub/%(namelower)s/%(version)s/'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), -] - -sanity_check_paths = { - 'files': ['bin/fltk-config', 'bin/fluid'], - 'dirs': ['lib'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/f/Fiona/Fiona-1.8.16-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/f/Fiona/Fiona-1.8.16-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index ada1c48ea2f5913390314432c8e10b2c825a3db4..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/Fiona/Fiona-1.8.16-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'Fiona' -version = '1.8.16' -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://github.com/Toblerity/Fiona' -description = """Fiona is designed to be simple and dependable. It focuses on reading and writing data -in standard Python IO style and relies upon familiar Python types and protocols such as files, dictionaries, -mappings, and iterators instead of classes specific to OGR. 
Fiona can read and write real-world data using -multi-layered GIS formats and zipped virtual file systems and integrates readily with other Python GIS -packages such as pyproj, Rtree, and Shapely.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -dependencies = [ - ('Python', '3.8.5'), - ('GDAL', '3.1.2', versionsuffix), - ('Shapely', '1.7.1', versionsuffix), # optional -] - -use_pip = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cligj', '0.5.0', { - 'checksums': ['6c7d52d529a78712491974f975c33473f430c0f7beb18c0d7a402a743dcb460a'], - }), - ('click-plugins', '1.1.1', { - 'checksums': ['46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b'], - }), - ('munch', '2.5.0', { - 'checksums': ['2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2'], - }), - (name, version, { - 'checksums': ['fd6dfb65959becc916e9f6928618bfd59c16cdbc413ece0fbac61489cd11255f'], - }), -] - -sanity_pip_check = True - -sanity_check_paths = { - 'files': ['bin/fio'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/f/FriBidi/FriBidi-1.0.9-GCCcore-10.3.0.eb b/Golden_Repo/f/FriBidi/FriBidi-1.0.9-GCCcore-10.3.0.eb deleted file mode 100644 index b2d8a0ee0e5b3039928a6c023f52466a4f0689df..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FriBidi/FriBidi-1.0.9-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -# Authors:: Jack Perdue <j-perdue@tamu.edu> - TAMU HPRC - https://hprc.tamu.edu - -easyblock = 'ConfigureMake' - -name = 'FriBidi' -version = '1.0.9' - -homepage = 'https://github.com/fribidi/fribidi' - -description = """ - The Free Implementation of the Unicode Bidirectional Algorithm. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/%(namelower)s/%(namelower)s/releases/download/v%(version)s'] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -configopts = '--disable-docs' - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'include/%(namelower)s/%(namelower)s.h', - 'lib/lib%%(namelower)s.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/f/FriBidi/FriBidi-1.0.9-GCCcore-9.3.0.eb b/Golden_Repo/f/FriBidi/FriBidi-1.0.9-GCCcore-9.3.0.eb deleted file mode 100644 index de7fc365fb3aa5e08baa65bc9dff108579f9b4aa..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/FriBidi/FriBidi-1.0.9-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -# Authors:: Jack Perdue <j-perdue@tamu.edu> - TAMU HPRC - https://hprc.tamu.edu - -easyblock = 'ConfigureMake' - -name = 'FriBidi' -version = '1.0.9' - -homepage = 'https://github.com/fribidi/fribidi' - -description = """ - The Free Implementation of the Unicode Bidirectional Algorithm. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/%(namelower)s/%(namelower)s/releases/download/v%(version)s'] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -configopts = '--disable-docs' - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'include/%(namelower)s/%(namelower)s.h', - 'lib/lib%%(namelower)s.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/f/fastai/fastai-2.2.7-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/f/fastai/fastai-2.2.7-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index f2901b0f7e469450c7258e599e0dca8ed8413311..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/fastai/fastai-2.2.7-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,58 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'fastai' -version = '2.2.7' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://docs.fast.ai' -description = """fastai simplifies training fast and accurate neural nets using modern best practices -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), - ('PyTorch', '1.7.0', versionsuffix), - ('scikit', '2021', versionsuffix), -] - -exts_defaultclass = 'PythonPackage' -exts_default_options = {'source_urls': [PYPI_SOURCE]} -use_pip = True -exts_download_dep_fail = True -sanity_pip_check = True - -exts_list = [ - ('blis', '0.7.4', { # Blis AMD from the system is not recognized - 'source_tmpl': 'blis-%(version)s-cp38-cp38-manylinux2014_x86_64.whl', - 'unpack_sources': False, - }), - ('catalogue', '1.0.0'), - ('wasabi', '0.8.2'), - ('tqdm', '4.59.0'), - ('plac', '1.1.3'), - ('srsly', '1.0.5'), - ('fastcore', '1.3.8'), # Version parity with jupyter. 
- ('fastprogress', '1.0.0'), - ('cymem', '2.0.5'), - ('murmurhash', '1.0.5'), - ('preshed', '3.0.5'), - ('thinc', '7.4.5', { - 'source_tmpl': 'thinc-%(version)s-cp38-cp38-manylinux2014_x86_64.whl', - 'unpack_sources': False, - }), - ('torchvision', '0.8.1', { # Torchvision from github gives wrong version number - 'source_tmpl': 'torchvision-%(version)s-cp38-cp38-manylinux1_x86_64.whl', - 'unpack_sources': False, - }), - ('spacy', '2.3.5', { - 'source_tmpl': 'spacy-%(version)s-cp38-cp38-manylinux2014_x86_64.whl', - 'unpack_sources': False, - }), - ('fastai', version), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/f/flatbuffers/flatbuffers-1.12.0-GCCcore-9.3.0.eb b/Golden_Repo/f/flatbuffers/flatbuffers-1.12.0-GCCcore-9.3.0.eb deleted file mode 100644 index 95fba87960c110a73e4105e20cf5a0e1526cbf7f..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/flatbuffers/flatbuffers-1.12.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'CMakeNinja' - -name = 'flatbuffers' -version = '1.12.0' - -homepage = 'https://github.com/google/flatbuffers/' -description = """FlatBuffers: Memory Efficient Serialization Library""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/google/flatbuffers/archive/v%(version)s/'] -sources = [SOURCE_TAR_GZ] -checksums = ['62f2223fb9181d1d6338451375628975775f7522185266cd5296571ac152bc45'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Ninja', '1.10.0'), -] - -configopts = '-DFLATBUFFERS_ENABLE_PCH=ON ' - -sanity_check_paths = { - 'files': ['include/flatbuffers/flatbuffers.h', 'bin/flatc', 'lib/libflatbuffers.a'], - 'dirs': ['lib/cmake'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/f/flex/flex-2.6.3-GCCcore-10.3.0.eb b/Golden_Repo/f/flex/flex-2.6.3-GCCcore-10.3.0.eb deleted file mode 100644 index 2e905c58bfd261eed51d344d1ce345fcf1b38144..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/flex/flex-2.6.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'flex' -version = '2.6.3' - -homepage = 'http://flex.sourceforge.net/' -description = """Flex (Fast Lexical Analyzer) is a tool for generating scanners. A scanner, - sometimes called a tokenizer, is a program which recognizes lexical patterns in text. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['https://github.com/westes/flex/releases/download/v%(version)s/'] - -dependencies = [('Bison', '3.7.6')] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.36.1', '', SYSTEM)] - -parallel = 1 - -moduleclass = 'lang' diff --git a/Golden_Repo/f/flex/flex-2.6.3-GCCcore-9.3.0.eb b/Golden_Repo/f/flex/flex-2.6.3-GCCcore-9.3.0.eb deleted file mode 100644 index 01ccbd6c324530c8c8fc69881012935f381b3c38..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/flex/flex-2.6.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'flex' -version = '2.6.3' - -homepage = 'http://flex.sourceforge.net/' -description = """Flex (Fast Lexical Analyzer) is a tool for generating scanners. A scanner, - sometimes called a tokenizer, is a program which recognizes lexical patterns in text. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['https://github.com/westes/flex/releases/download/v%(version)s/'] - -dependencies = [('Bison', '3.6.4')] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.34', '', SYSTEM)] - -parallel = 1 - -moduleclass = 'lang' diff --git a/Golden_Repo/f/flex/flex-2.6.4-GCCcore-10.3.0.eb b/Golden_Repo/f/flex/flex-2.6.4-GCCcore-10.3.0.eb deleted file mode 100644 index 83dd97f26b8218074109bc8fa7973bcb97592d5a..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/flex/flex-2.6.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -name = 'flex' -version = '2.6.4' - -homepage = 'http://flex.sourceforge.net/' - -description = """ - Flex (Fast Lexical Analyzer) is a tool for generating scanners. A scanner, - sometimes called a tokenizer, is a program which recognizes lexical patterns - in text. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://github.com/westes/flex/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995'] - -builddependencies = [ - ('Bison', '3.7.6'), - ('help2man', '1.48.3'), - # use same binutils version that was used when building GCC toolchain - ('binutils', '2.36.1', '', SYSTEM), -] - -dependencies = [ - ('M4', '1.4.18'), -] - -# glibc 2.26 requires _GNU_SOURCE defined to expose reallocarray in the correct -# header, see https://github.com/westes/flex/issues/241 -preconfigopts = 'export CPPFLAGS="$CPPFLAGS -D_GNU_SOURCE" && ' - -moduleclass = 'lang' diff --git a/Golden_Repo/f/flex/flex-2.6.4-GCCcore-9.3.0.eb b/Golden_Repo/f/flex/flex-2.6.4-GCCcore-9.3.0.eb deleted file mode 100644 index 97737fd2e757a0715479584fa685ff32e9201ad6..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/flex/flex-2.6.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'flex' -version = '2.6.4' - -homepage = 'http://flex.sourceforge.net/' -description = """Flex (Fast Lexical Analyzer) is a tool for generating scanners. A scanner, - sometimes called a tokenizer, is a program which recognizes lexical patterns in text. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['https://github.com/westes/flex/releases/download/v%(version)s/'] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [ - ('binutils', '2.34', '', SYSTEM), - ('M4', '1.4.18') -] - -dependencies = [('Bison', '3.6.4')] - -parallel = 1 - -moduleclass = 'lang' diff --git a/Golden_Repo/f/flex/flex-2.6.4.eb b/Golden_Repo/f/flex/flex-2.6.4.eb deleted file mode 100644 index aeca4b8b273bdd968dc462b53393089b290877be..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/flex/flex-2.6.4.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'flex' -version = '2.6.4' - -homepage = 'http://flex.sourceforge.net/' -description = """Flex (Fast Lexical Analyzer) is a tool for generating scanners. 
A scanner, - sometimes called a tokenizer, is a program which recognizes lexical patterns in text.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'https://github.com/westes/flex/releases/download/v%(version)s/'] - -builddependencies = [ - ('M4', '1.4.18'), -] - -dependencies = [ - ('Bison', '3.7.6'), -] - -hidden = True - -moduleclass = 'lang' diff --git a/Golden_Repo/f/fontconfig/fontconfig-2.13.92-GCCcore-10.3.0.eb b/Golden_Repo/f/fontconfig/fontconfig-2.13.92-GCCcore-10.3.0.eb deleted file mode 100644 index da069a89012aa481eb4e4ac4eb49eedf826f1a06..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/fontconfig/fontconfig-2.13.92-GCCcore-10.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'fontconfig' -version = '2.13.92' - -homepage = 'http://www.freedesktop.org/software/fontconfig' -description = """Fontconfig is a library designed to provide system-wide font configuration, customization and -application access. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://www.freedesktop.org/software/fontconfig/release/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('gperf', '3.1'), - ('util-linux', '2.36'), -] - -configopts = '--disable-docs ' - -moduleclass = 'vis' diff --git a/Golden_Repo/f/fontconfig/fontconfig-2.13.92-GCCcore-9.3.0.eb b/Golden_Repo/f/fontconfig/fontconfig-2.13.92-GCCcore-9.3.0.eb deleted file mode 100644 index 7ba21e69644800bd99f4053022564af0bdc83113..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/fontconfig/fontconfig-2.13.92-GCCcore-9.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'fontconfig' -version = '2.13.92' - -homepage = 'http://www.freedesktop.org/software/fontconfig' -description = """Fontconfig is a library designed to provide system-wide font configuration, customization and -application access. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://www.freedesktop.org/software/fontconfig/release/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('gperf', '3.1'), - ('util-linux', '2.36'), -] - -configopts = '--disable-docs ' - -moduleclass = 'vis' diff --git a/Golden_Repo/f/freeglut/freeglut-3.2.1-GCCcore-10.3.0.eb b/Golden_Repo/f/freeglut/freeglut-3.2.1-GCCcore-10.3.0.eb deleted file mode 100644 index 32b906f4b635171eac4229c092c975881e06a5fc..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/freeglut/freeglut-3.2.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'freeglut' -version = '3.2.1' - -homepage = 'http://freeglut.sourceforge.net/' -description = """freeglut is a completely OpenSourced alternative to the OpenGL Utility Toolkit (GLUT) library. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = ['http://prdownloads.sourceforge.net/%(name)s'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), -] - -patches = [ - 'freeglut-3.2.1_fixgcc10.patch', -] - -sanity_check_paths = { - 'files': [('lib/libglut.a', 'lib64/libglut.a'), ('lib/libglut.%s' % SHLIB_EXT, 'lib64/libglut.%s' % SHLIB_EXT)], - 'dirs': ['include/GL'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/f/freeglut/freeglut-3.2.1-GCCcore-9.3.0.eb b/Golden_Repo/f/freeglut/freeglut-3.2.1-GCCcore-9.3.0.eb deleted file mode 100644 index 5cb29646d825fb5bab65637e64aff330441427cc..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/freeglut/freeglut-3.2.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'freeglut' -version = '3.2.1' - -homepage = 'http://freeglut.sourceforge.net/' -description = """freeglut is a completely OpenSourced alternative to the OpenGL Utility Toolkit (GLUT) library. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = ['http://prdownloads.sourceforge.net/%(name)s'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), -] - -sanity_check_paths = { - 'files': [('lib/libglut.a', 'lib64/libglut.a'), ('lib/libglut.%s' % SHLIB_EXT, 'lib64/libglut.%s' % SHLIB_EXT)], - 'dirs': ['include/GL'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/f/freeglut/freeglut-3.2.1_fixgcc10.patch b/Golden_Repo/f/freeglut/freeglut-3.2.1_fixgcc10.patch deleted file mode 100644 index d507a179521f38d02bb5c1178305c98e97f6ddf3..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/freeglut/freeglut-3.2.1_fixgcc10.patch +++ /dev/null @@ -1,46 +0,0 @@ ---- a/src/fg_gl2.c -+++ b/src/fg_gl2.c -@@ -27,6 +27,18 @@ - #include "fg_internal.h" - #include "fg_gl2.h" - -+#ifdef GL_ES_VERSION_2_0 -+/* Use existing functions on GLES 2.0 */ -+#else -+FGH_PFNGLGENBUFFERSPROC fghGenBuffers; -+FGH_PFNGLDELETEBUFFERSPROC fghDeleteBuffers; -+FGH_PFNGLBINDBUFFERPROC fghBindBuffer; -+FGH_PFNGLBUFFERDATAPROC fghBufferData; -+FGH_PFNGLENABLEVERTEXATTRIBARRAYPROC fghEnableVertexAttribArray; -+FGH_PFNGLDISABLEVERTEXATTRIBARRAYPROC fghDisableVertexAttribArray; -+FGH_PFNGLVERTEXATTRIBPOINTERPROC fghVertexAttribPointer; -+#endif -+ - void FGAPIENTRY glutSetVertexAttribCoord3(GLint attrib) { - if (fgStructure.CurrentWindow != NULL) - fgStructure.CurrentWindow->Window.attribute_v_coord = attrib; -diff --git a/freeglut/freeglut/src/fg_gl2.h b/freeglut/freeglut/src/fg_gl2.h -index ab8ba5c7..fb3d4676 100644 ---- a/src/fg_gl2.h -+++ b/src/fg_gl2.h -@@ -67,13 +67,13 @@ typedef void (APIENTRY *FGH_PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); - typedef void (APIENTRY *FGH_PFNGLDISABLEVERTEXATTRIBARRAYPROC) (GLuint); - typedef void (APIENTRY *FGH_PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const GLvoid *pointer); - --FGH_PFNGLGENBUFFERSPROC fghGenBuffers; --FGH_PFNGLDELETEBUFFERSPROC fghDeleteBuffers; --FGH_PFNGLBINDBUFFERPROC fghBindBuffer; --FGH_PFNGLBUFFERDATAPROC fghBufferData; --FGH_PFNGLENABLEVERTEXATTRIBARRAYPROC fghEnableVertexAttribArray; --FGH_PFNGLDISABLEVERTEXATTRIBARRAYPROC fghDisableVertexAttribArray; 
--FGH_PFNGLVERTEXATTRIBPOINTERPROC fghVertexAttribPointer; -+extern FGH_PFNGLGENBUFFERSPROC fghGenBuffers; -+extern FGH_PFNGLDELETEBUFFERSPROC fghDeleteBuffers; -+extern FGH_PFNGLBINDBUFFERPROC fghBindBuffer; -+extern FGH_PFNGLBUFFERDATAPROC fghBufferData; -+extern FGH_PFNGLENABLEVERTEXATTRIBARRAYPROC fghEnableVertexAttribArray; -+extern FGH_PFNGLDISABLEVERTEXATTRIBARRAYPROC fghDisableVertexAttribArray; -+extern FGH_PFNGLVERTEXATTRIBPOINTERPROC fghVertexAttribPointer; - - # endif - diff --git a/Golden_Repo/f/freetype/freetype-2.10.1-GCCcore-10.3.0.eb b/Golden_Repo/f/freetype/freetype-2.10.1-GCCcore-10.3.0.eb deleted file mode 100644 index 0d96f18981b642398adb6d3abfda55b17248571d..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/freetype/freetype-2.10.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'freetype' -version = '2.10.1' - -homepage = 'http://freetype.org' -description = """FreeType 2 is a software font engine that is designed to be small, efficient, highly customizable, and - portable while capable of producing high-quality output (glyph images). It can be used in graphics libraries, display - servers, font conversion tools, text image generation tools, and many other products as well. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SAVANNAH_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('bzip2', '1.0.8'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -configopts = '--enable-freetype-config --with-harfbuzz=no' - -sanity_check_paths = { - 'files': ['bin/freetype-config', 'lib/libfreetype.a', 'lib/libfreetype.%s' % SHLIB_EXT, - 'lib/pkgconfig/freetype2.pc'], - 'dirs': ['include/freetype2'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/f/freetype/freetype-2.10.1-GCCcore-9.3.0.eb b/Golden_Repo/f/freetype/freetype-2.10.1-GCCcore-9.3.0.eb deleted file mode 100644 index 113080252abc7864b38778880833a241290ea9d4..0000000000000000000000000000000000000000 --- a/Golden_Repo/f/freetype/freetype-2.10.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'freetype' -version = '2.10.1' - -homepage = 'http://freetype.org' -description = """FreeType 2 is a software font engine that is designed to be small, efficient, highly customizable, and - portable while capable of producing high-quality output (glyph images). It can be used in graphics libraries, display - servers, font conversion tools, text image generation tools, and many other products as well. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SAVANNAH_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('bzip2', '1.0.8'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -configopts = '--enable-freetype-config --with-harfbuzz=no' - -sanity_check_paths = { - 'files': ['bin/freetype-config', 'lib/libfreetype.a', 'lib/libfreetype.%s' % SHLIB_EXT, - 'lib/pkgconfig/freetype2.pc'], - 'dirs': ['include/freetype2'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GCC/GCC-10.3.0.eb b/Golden_Repo/g/GCC/GCC-10.3.0.eb deleted file mode 100644 index 8b86ad75f299d4ea13587293ec1ee4af2b4c3641..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCC/GCC-10.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'Bundle' - -name = 'GCC' -version = '10.3.0' - -homepage = 'https://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). -This module supports NVPTX offloading support. If you want to use this you have to load the CUDA module. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -dependencies = [ - ('GCCcore', version), - # binutils built on top of GCCcore, which was built on top of (dummy-built) binutils - ('binutils', '2.36.1', '', ('GCCcore', version)), -] - -altroot = 'GCCcore' -altversion = 'GCCcore' - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' diff --git a/Golden_Repo/g/GCC/GCC-9.3.0.eb b/Golden_Repo/g/GCC/GCC-9.3.0.eb deleted file mode 100644 index 29aa6c96e10b3da2886bd9998cc4f4c5fe1b33e4..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCC/GCC-9.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'Bundle' - -name = 'GCC' -version = '9.3.0' - -homepage = 'http://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). -This module supports NVPTX offloading support. If you want to use this you have to load the CUDA module. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -dependencies = [ - ('GCCcore', version), - # binutils built on top of GCCcore, which was built on top of (dummy-built) binutils - ('binutils', '2.34', '', ('GCCcore', version)), -] - -altroot = 'GCCcore' -altversion = 'GCCcore' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = 'family("compiler")' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -# this bundle serves as a compiler toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' diff --git a/Golden_Repo/g/GCCcore/GCCcore-10.3.0.eb b/Golden_Repo/g/GCCcore/GCCcore-10.3.0.eb deleted file mode 100644 index 2ea512f0e1b448e3a874f3c4ae158b70d5928844..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCCcore/GCCcore-10.3.0.eb +++ /dev/null @@ -1,75 +0,0 @@ -easyblock = 'EB_GCC' - -name = 'GCCcore' -version = '10.3.0' - -homepage = 'https://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). 
-This module supports NVPTX offloading support. If you want to use this you have to load the CUDA module. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = [ - # GCC auto-resolving HTTP mirror - 'https://ftpmirror.gnu.org/gnu/gcc/gcc-%(version)s', - 'https://ftpmirror.gnu.org/gnu/gmp', # idem for GMP - 'https://ftpmirror.gnu.org/gnu/mpfr', # idem for MPFR - 'https://ftpmirror.gnu.org/gnu/mpc', # idem for MPC - 'ftp://gcc.gnu.org/pub/gcc/infrastructure/', # GCC dependencies - 'http://gcc.cybermirror.org/infrastructure/', # HTTP mirror for GCC dependencies - 'http://isl.gforge.inria.fr/', # original HTTP source for ISL - 'https://sourceware.org/pub/newlib/', # for newlib - 'https://github.com/MentorEmbedded/nvptx-tools/archive', # for nvptx-tools -] -sources = [ - 'gcc-%(version)s.tar.gz', - 'gmp-6.2.1.tar.bz2', - 'mpfr-4.1.0.tar.bz2', - 'mpc-1.2.1.tar.gz', - 'isl-0.23.tar.bz2', - 'newlib-4.1.0.tar.gz', - {'download_filename': 'd0524fb.tar.gz', - 'filename': 'nvptx-tools-20210115.tar.gz'}, -] -patches = [ - 'GCCcore-6.2.0-fix-find-isl.patch', - 'GCCcore-9.3.0_gmp-c99.patch', - 'gcc_10.3.0_pt.patch', # https://github.com/NVIDIA/nccl/issues/494 -] -checksums = [ - '8fcf994811ad4e5c7ac908e8cf62af2c1982319e5551f62ae72016064dacdf16', # gcc-10.3.0.tar.gz - 'eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c', # gmp-6.2.1.tar.bz2 - 'feced2d430dd5a97805fa289fed3fc8ff2b094c02d05287fd6133e7f1f0ec926', # mpfr-4.1.0.tar.bz2 - '17503d2c395dfcf106b622dc142683c1199431d095367c6aacba6eec30340459', # mpc-1.2.1.tar.gz - 'c58922c14ae7d0791a77932f377840890f19bc486b653fa64eba7f1026fb214d', # isl-0.23.tar.bz2 - 'f296e372f51324224d387cc116dc37a6bd397198756746f93a2b02e9a5d40154', # newlib-4.1.0.tar.gz - # nvptx-tools-20210115.tar.gz - '466abe1cef9cf294318ecb3c221593356f7a9e1674be987d576bc70d833d84a2', - # GCCcore-6.2.0-fix-find-isl.patch - '5ad909606d17d851c6ad629b4fddb6c1621844218b8d139fed18c502a7696c68', - # GCCcore-9.3.0_gmp-c99.patch - '0e135e1cc7cec701beea9d7d17a61bab34cfd496b4b555930016b98db99f922e', -] - -builddependencies = [ - ('M4', '1.4.18'), - ('binutils', '2.36.1'), -] - -languages = ['c', 'c++', 'fortran'] - -withisl = True -withnvptx = True - -# Perl is only required when building with NVPTX support -if withnvptx: - osdependencies = ['perl'] - -# Make sure we replace the system cc with gcc with an alias -modaliases = {'cc': 'gcc'} - -moduleclass = 'compiler' diff --git a/Golden_Repo/g/GCCcore/GCCcore-8.3.0_fix-xsmin-ppc.patch b/Golden_Repo/g/GCCcore/GCCcore-8.3.0_fix-xsmin-ppc.patch deleted file mode 100644 index 6ce9ea5352568ecbff350d290e941af592c5e021..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCCcore/GCCcore-8.3.0_fix-xsmin-ppc.patch +++ /dev/null @@ -1,74 +0,0 @@ -From 37e0df8a9be5a8232f4ccb73cdadb02121ba523f Mon Sep 17 00:00:00 2001 -From: Jiufu Guo <guojiufu@linux.ibm.com> -Date: Tue, 10 Mar 2020 13:51:57 +0800 -Subject: [PATCH] rs6000: Check -+0 and NaN for smax/smin generation - -PR93709 mentioned regressions on maxlocval_4.f90 and minlocval_f.f90 which -relates to max of '-inf' and 'nan'. This regression occur on P9 because -P9 new instruction 'xsmaxcdp' is generated. -And for C code `a < b ? b : a` is also generated as `xsmaxcdp` under -O2 -for P9. While this instruction behavior more like C/C++ semantic (a>b?a:b). - -This generates prevents 'xsmaxcdp' to be generated for those cases. -'xsmincdp' also is handled in patch. 
- -gcc/ -2020-03-10 Jiufu Guo <guojiufu@linux.ibm.com> - - PR target/93709 - * gcc/config/rs6000/rs6000.c (rs6000_emit_p9_fp_minmax): Check - NAN and SIGNED_ZEROR for smax/smin. - -gcc/testsuite -2020-03-10 Jiufu Guo <guojiufu@linux.ibm.com> - - PR target/93709 - * gcc.target/powerpc/p9-minmax-3.c: New test. ---- - gcc/ChangeLog | 6 ++++++ - gcc/config/rs6000/rs6000.c | 6 +++++- - gcc/testsuite/ChangeLog | 5 +++++ - gcc/testsuite/gcc.target/powerpc/p9-minmax-3.c | 17 +++++++++++++++++ - 4 files changed, 33 insertions(+), 1 deletion(-) - create mode 100644 gcc/testsuite/gcc.target/powerpc/p9-minmax-3.c - -diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c -index 848a4ef451e4..46b7dec2abd4 100644 ---- a/gcc/config/rs6000/rs6000.c -+++ b/gcc/config/rs6000/rs6000.c -@@ -14831,7 +14831,11 @@ rs6000_emit_p9_fp_minmax (rtx dest, rtx op, rtx true_cond, rtx false_cond) - if (rtx_equal_p (op0, true_cond) && rtx_equal_p (op1, false_cond)) - ; - -- else if (rtx_equal_p (op1, true_cond) && rtx_equal_p (op0, false_cond)) -+ /* Only when NaNs and signed-zeros are not in effect, smax could be -+ used for `op0 < op1 ? op1 : op0`, and smin could be used for -+ `op0 > op1 ? op1 : op0`. */ -+ else if (rtx_equal_p (op1, true_cond) && rtx_equal_p (op0, false_cond) -+ && !HONOR_NANS (compare_mode) && !HONOR_SIGNED_ZEROS (compare_mode)) - max_p = !max_p; - - else -diff --git a/gcc/testsuite/gcc.target/powerpc/p9-minmax-3.c b/gcc/testsuite/gcc.target/powerpc/p9-minmax-3.c -new file mode 100644 -index 000000000000..141603e05b43 ---- /dev/null -+++ b/gcc/testsuite/gcc.target/powerpc/p9-minmax-3.c -@@ -0,0 +1,17 @@ -+/* { dg-do compile { target { powerpc*-*-* } } } */ -+/* { dg-require-effective-target powerpc_p9vector_ok } */ -+/* { dg-options "-mdejagnu-cpu=power9 -O2 -mpower9-minmax" } */ -+/* { dg-final { scan-assembler-not "xsmaxcdp" } } */ -+/* { dg-final { scan-assembler-not "xsmincdp" } } */ -+ -+double -+dbl_max1 (double a, double b) -+{ -+ return a < b ? b : a; -+} -+ -+double -+dbl_min1 (double a, double b) -+{ -+ return a > b ? b : a; -+} diff --git a/Golden_Repo/g/GCCcore/GCCcore-9.3.0.eb b/Golden_Repo/g/GCCcore/GCCcore-9.3.0.eb deleted file mode 100644 index 9bebd5b16e58372e87f98fcf5810846607239526..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCCcore/GCCcore-9.3.0.eb +++ /dev/null @@ -1,70 +0,0 @@ -easyblock = 'EB_GCC' - -name = 'GCCcore' -version = '9.3.0' -local_newlib_version = '3.3.0' - -homepage = 'https://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). -This module supports NVPTX offloading support. If you want to use this you have to load the CUDA module. 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = [ - 'https://ftpmirror.gnu.org/gnu/gcc/gcc-%(version)s', # GCC auto-resolving HTTP mirror - 'https://ftpmirror.gnu.org/gnu/gmp', # idem for GMP - 'https://ftpmirror.gnu.org/gnu/mpfr', # idem for MPFR - 'https://ftpmirror.gnu.org/gnu/mpc', # idem for MPC - 'ftp://gcc.gnu.org/pub/gcc/infrastructure/', # GCC dependencies - 'http://gcc.cybermirror.org/infrastructure/', # HTTP mirror for GCC dependencies - 'http://isl.gforge.inria.fr/', # original HTTP source for ISL - 'https://sourceware.org/pub/newlib/', # for newlib - 'https://github.com/MentorEmbedded/nvptx-tools/archive', # for nvptx-tools -] -sources = [ - 'gcc-%(version)s.tar.gz', - 'gmp-6.2.0.tar.bz2', - 'mpfr-4.0.2.tar.bz2', - 'mpc-1.1.0.tar.gz', - 'isl-0.22.1.tar.bz2', - 'newlib-3.3.0.tar.gz', - {'download_filename': '5f6f343.tar.gz', 'filename': 'nvptx-tools-20180301.tar.gz'}, -] -patches = [ - 'GCCcore-6.2.0-fix-find-isl.patch', - 'GCCcore-8.3.0_fix-xsmin-ppc.patch', - 'GCCcore-%(version)s_gmp-c99.patch', - 'GCCcore-%(version)s_vect_broadcasts_masmintel.patch', -] -checksums = [ - '5258a9b6afe9463c2e56b9e8355b1a4bee125ca828b8078f910303bc2ef91fa6', # gcc-9.3.0.tar.gz - 'f51c99cb114deb21a60075ffb494c1a210eb9d7cb729ed042ddb7de9534451ea', # gmp-6.2.0.tar.bz2 - 'c05e3f02d09e0e9019384cdd58e0f19c64e6db1fd6f5ecf77b4b1c61ca253acc', # mpfr-4.0.2.tar.bz2 - '6985c538143c1208dcb1ac42cedad6ff52e267b47e5f970183a3e75125b43c2e', # mpc-1.1.0.tar.gz - '1a668ef92eb181a7c021e8531a3ca89fd71aa1b3744db56f68365ab0a224c5cd', # isl-0.22.1.tar.bz2 - '58dd9e3eaedf519360d92d84205c3deef0b3fc286685d1c562e245914ef72c66', # newlib-3.3.0.tar.gz - 'a25b6f7761bb61c0d8e2a183bcf51fbaaeeac26868dcfc015e3b16a33fe11705', # nvptx-tools-20180301.tar.gz - '5ad909606d17d851c6ad629b4fddb6c1621844218b8d139fed18c502a7696c68', # GCCcore-6.2.0-fix-find-isl.patch - 'bea1bce8f50ea6d51b038309eb61dec00a8681fb653d211c539be80f184609a3', # GCCcore-8.3.0_fix-xsmin-ppc.patch - '0e135e1cc7cec701beea9d7d17a61bab34cfd496b4b555930016b98db99f922e', # GCCcore-9.3.0_gmp-c99.patch - 'a32ac9c7d999a8b91bf93dba6a9d81b6ff58b3c89c425ff76090cbc90076685c', # GCCcore-9.3.0_vect_broadcasts_masmintel.patch -] - -builddependencies = [ - ('M4', '1.4.18'), - ('binutils', '2.34'), -] - -languages = ['c', 'c++', 'fortran'] - -withisl = True -withnvptx = True - -# Make sure we replace the system cc with gcc with an alias -modaliases = {'cc': 'gcc'} - -moduleclass = 'compiler' diff --git a/Golden_Repo/g/GCCcore/GCCcore-9.3.0_gmp-c99.patch b/Golden_Repo/g/GCCcore/GCCcore-9.3.0_gmp-c99.patch deleted file mode 100644 index 7c4c567c8469324c43ca4e82095e4455e597ca89..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCCcore/GCCcore-9.3.0_gmp-c99.patch +++ /dev/null @@ -1,67 +0,0 @@ -add -std=c99 when building GMP, to avoid compilation errors with older GCC system compilers -author: Kenneth Hoste (HPC-UGent) ---- gcc-9.3.0-RC-20200305/Makefile.in.orig 2020-03-10 20:30:39.851898414 +0100 -+++ gcc-9.3.0-RC-20200305/Makefile.in 2020-03-10 20:33:13.011735787 +0100 -@@ -12891,7 +12891,7 @@ - s=`cd $(srcdir); ${PWD_COMMAND}`; export s; \ - $(HOST_EXPORTS) \ - (cd $(HOST_SUBDIR)/gmp && \ -- $(MAKE) $(BASE_FLAGS_TO_PASS) $(EXTRA_HOST_FLAGS) $(STAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM" \ -+ $(MAKE) $(BASE_FLAGS_TO_PASS) $(EXTRA_HOST_FLAGS) $(STAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM -std=c99" \ - $(TARGET-gmp)) - @endif gmp - -@@ -12922,7 +12922,7 @@ - CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \ - 
LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \ - $(EXTRA_HOST_FLAGS) \ -- $(STAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM" \ -+ $(STAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM -std=c99" \ - TFLAGS="$(STAGE1_TFLAGS)" \ - $(TARGET-stage1-gmp) - -@@ -12937,7 +12937,7 @@ - fi; \ - cd $(HOST_SUBDIR)/gmp && \ - $(MAKE) $(EXTRA_HOST_FLAGS) \ -- $(STAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM" clean -+ $(STAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM -std=c99" clean - @endif gmp-bootstrap - - -@@ -12966,7 +12966,7 @@ - CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \ - CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \ - LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \ -- $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM" \ -+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM -std=c99" \ - TFLAGS="$(STAGE2_TFLAGS)" \ - $(TARGET-stage2-gmp) - -@@ -12980,7 +12980,7 @@ - $(MAKE) stage2-start; \ - fi; \ - cd $(HOST_SUBDIR)/gmp && \ -- $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM" clean -+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM -std=c99" clean - @endif gmp-bootstrap - - -@@ -13009,7 +13009,7 @@ - CFLAGS_FOR_TARGET="$(CFLAGS_FOR_TARGET)" \ - CXXFLAGS_FOR_TARGET="$(CXXFLAGS_FOR_TARGET)" \ - LIBCFLAGS_FOR_TARGET="$(LIBCFLAGS_FOR_TARGET)" \ -- $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM" \ -+ $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM -std=c99" \ - TFLAGS="$(STAGE3_TFLAGS)" \ - $(TARGET-stage3-gmp) - -@@ -13023,7 +13023,7 @@ - $(MAKE) stage3-start; \ - fi; \ - cd $(HOST_SUBDIR)/gmp && \ -- $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM" clean -+ $(MAKE) $(EXTRA_HOST_FLAGS) $(POSTSTAGE1_FLAGS_TO_PASS) AM_CFLAGS="-DNO_ASM -std=c99" clean - @endif gmp-bootstrap - - diff --git a/Golden_Repo/g/GCCcore/GCCcore-9.3.0_vect_broadcasts_masmintel.patch b/Golden_Repo/g/GCCcore/GCCcore-9.3.0_vect_broadcasts_masmintel.patch deleted file mode 100644 index 84aee424a2c62d1646ee0f1a446f5d310cfd9fa7..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCCcore/GCCcore-9.3.0_vect_broadcasts_masmintel.patch +++ /dev/null @@ -1,67 +0,0 @@ -From: Jakub Jelinek <jakub@redhat.com> -Date: Tue, 15 Sep 2020 07:37:48 +0000 (+0200) -Subject: i386: Fix up vector mul and div with broadcasts in -masm=intel mode -X-Git-Url: https://gcc.gnu.org/git/?p=gcc.git;a=commitdiff_plain;h=d0a094ce39fc49681b0d5cfd2ee1d232859c4824 - -i386: Fix up vector mul and div with broadcasts in -masm=intel mode - -These patterns printed bogus <>s around the {1to16} and similar strings. - -2020-09-15 Jakub Jelinek <jakub@redhat.com> - - PR target/97028 - * config/i386/sse.md (mul<mode>3<mask_name>_bcs, - <avx512>_div<mode>3<mask_name>_bcst): Use <avx512bcst> instead of - <<avx512bcst>>. - - * gcc.target/i386/avx512f-pr97028.c: Untested fix. 
- -(cherry picked from commit 0f079e104a8d1994b6b47169a6b45737615eb2d7) ---- - -diff --git a/gcc/config/i386/sse.md b/gcc/config/i386/sse.md -index cbcbe3846ca..3b51fef56f2 100644 ---- a/gcc/config/i386/sse.md -+++ b/gcc/config/i386/sse.md -@@ -1829,7 +1829,7 @@ - (match_operand:<ssescalarmode> 1 "memory_operand" "m")) - (match_operand:VF_AVX512 2 "register_operand" "v")))] - "TARGET_AVX512F && <mask_mode512bit_condition>" -- "vmul<ssemodesuffix>\t{%1<avx512bcst>, %2, %0<mask_operand3>|%0<mask_operand3>, %2, %1<<avx512bcst>>}" -+ "vmul<ssemodesuffix>\t{%1<avx512bcst>, %2, %0<mask_operand3>|%0<mask_operand3>, %2, %1<avx512bcst>}" - [(set_attr "prefix" "evex") - (set_attr "type" "ssemul") - (set_attr "mode" "<MODE>")]) -@@ -1899,7 +1899,7 @@ - (vec_duplicate:VF_AVX512 - (match_operand:<ssescalarmode> 2 "memory_operand" "m"))))] - "TARGET_AVX512F && <mask_mode512bit_condition>" -- "vdiv<ssemodesuffix>\t{%2<avx512bcst>, %1, %0<mask_operand3>|%0<mask_operand3>, %1, %2<<avx512bcst>>}" -+ "vdiv<ssemodesuffix>\t{%2<avx512bcst>, %1, %0<mask_operand3>|%0<mask_operand3>, %1, %2<avx512bcst>}" - [(set_attr "prefix" "evex") - (set_attr "type" "ssediv") - (set_attr "mode" "<MODE>")]) -diff --git a/gcc/testsuite/gcc.target/i386/avx512f-pr97028.c b/gcc/testsuite/gcc.target/i386/avx512f-pr97028.c -new file mode 100644 -index 00000000000..2719108a411 ---- /dev/null -+++ b/gcc/testsuite/gcc.target/i386/avx512f-pr97028.c -@@ -0,0 +1,18 @@ -+/* PR target/97028 */ -+/* { dg-do assemble { target avx512f } } */ -+/* { dg-require-effective-target masm_intel } */ -+/* { dg-options "-O2 -mavx512f -masm=intel" } */ -+ -+#include <x86intrin.h> -+ -+__m512 -+foo (__m512 x, float *y) -+{ -+ return _mm512_mul_ps (x, _mm512_set1_ps (*y)); -+} -+ -+__m512 -+bar (__m512 x, float *y) -+{ -+ return _mm512_div_ps (x, _mm512_set1_ps (*y)); -+} diff --git a/Golden_Repo/g/GCCcore/gcc_10.3.0_pt.patch b/Golden_Repo/g/GCCcore/gcc_10.3.0_pt.patch deleted file mode 100644 index 92ac8ed83a62fd17d312518216281ccf1c2c4428..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCCcore/gcc_10.3.0_pt.patch +++ /dev/null @@ -1,47 +0,0 @@ ---- gcc-10.3.0/gcc/cp/pt.c.orig 2021-07-01 15:37:50.631102114 +0200 -+++ gcc-10.3.0/gcc/cp/pt.c 2021-07-01 15:45:20.052732115 +0200 -@@ -13751,44 +13751,6 @@ - return spec; - } - -- /* We can see more levels of arguments than parameters if -- there was a specialization of a member template, like -- this: -- -- template <class T> struct S { template <class U> void f(); } -- template <> template <class U> void S<int>::f(U); -- -- Here, we'll be substituting into the specialization, -- because that's where we can find the code we actually -- want to generate, but we'll have enough arguments for -- the most general template. -- -- We also deal with the peculiar case: -- -- template <class T> struct S { -- template <class U> friend void f(); -- }; -- template <class U> void f() {} -- template S<int>; -- template void f<double>(); -- -- Here, the ARGS for the instantiation of will be {int, -- double}. But, we only need as many ARGS as there are -- levels of template parameters in CODE_PATTERN. We are -- careful not to get fooled into reducing the ARGS in -- situations like: -- -- template <class T> struct S { template <class U> void f(U); } -- template <class T> template <> void S<T>::f(int) {} -- -- which we can spot because the pattern will be a -- specialization in this case. 
*/ -- int args_depth = TMPL_ARGS_DEPTH (args); -- int parms_depth = -- TMPL_PARMS_DEPTH (DECL_TEMPLATE_PARMS (DECL_TI_TEMPLATE (t))); -- -- if (args_depth > parms_depth && !DECL_TEMPLATE_SPECIALIZATION (t)) -- args = get_innermost_template_args (args, parms_depth); - } - else - { diff --git a/Golden_Repo/g/GCCcore/gcc_9.3.0_remove_sm_30.patch b/Golden_Repo/g/GCCcore/gcc_9.3.0_remove_sm_30.patch deleted file mode 100644 index 44a15c771cdfeb07a0379abffdd211f24fd38770..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GCCcore/gcc_9.3.0_remove_sm_30.patch +++ /dev/null @@ -1,51 +0,0 @@ -diff -ruN gcc-9.3.0.orig/gcc/config/nvptx/nvptx.c gcc-9.3.0/gcc/config/nvptx/nvptx.c ---- gcc-9.3.0.orig/gcc/config/nvptx/nvptx.c 2020-07-15 23:11:53.663897096 +0200 -+++ gcc-9.3.0/gcc/config/nvptx/nvptx.c 2020-07-15 23:14:56.885947516 +0200 -@@ -5192,10 +5192,7 @@ - { - fputs ("// BEGIN PREAMBLE\n", asm_out_file); - fputs ("\t.version\t3.1\n", asm_out_file); -- if (TARGET_SM35) -- fputs ("\t.target\tsm_35\n", asm_out_file); -- else -- fputs ("\t.target\tsm_30\n", asm_out_file); -+ fputs ("\t.target\tsm_35\n", asm_out_file); - fprintf (asm_out_file, "\t.address_size %d\n", GET_MODE_BITSIZE (Pmode)); - fputs ("// END PREAMBLE\n", asm_out_file); - } -diff -ruN gcc-9.3.0.orig/gcc/config/nvptx/nvptx.opt gcc-9.3.0/gcc/config/nvptx/nvptx.opt ---- gcc-9.3.0.orig/gcc/config/nvptx/nvptx.opt 2020-07-15 23:11:53.663897096 +0200 -+++ gcc-9.3.0/gcc/config/nvptx/nvptx.opt 2020-07-15 23:14:24.665640891 +0200 -@@ -54,9 +54,6 @@ - Known PTX ISA versions (for use with the -misa= option): - - EnumValue --Enum(ptx_isa) String(sm_30) Value(PTX_ISA_SM30) -- --EnumValue - Enum(ptx_isa) String(sm_35) Value(PTX_ISA_SM35) - - misa= -diff -ruN gcc-9.3.0.orig/gcc/testsuite/gcc.target/nvptx/atomic_fetch-2.c gcc-9.3.0/gcc/testsuite/gcc.target/nvptx/atomic_fetch-2.c ---- gcc-9.3.0.orig/gcc/testsuite/gcc.target/nvptx/atomic_fetch-2.c 2020-07-15 23:11:58.558791537 +0200 -+++ gcc-9.3.0/gcc/testsuite/gcc.target/nvptx/atomic_fetch-2.c 2020-07-15 23:25:27.883375555 +0200 -@@ -2,7 +2,7 @@ - targets. */ - - /* { dg-do compile } */ --/* { dg-options "-O2 -misa=sm_30" } */ -+/* { dg-options "-O2 -misa=sm_35" } */ - - int - main() -diff -ruN gcc-9.3.0.orig/libgomp/testsuite/libgomp.oacc-c-c++-common/subr.ptx gcc-9.3.0/libgomp/testsuite/libgomp.oacc-c-c++-common/subr.ptx ---- gcc-9.3.0.orig/libgomp/testsuite/libgomp.oacc-c-c++-common/subr.ptx 2020-07-15 23:11:52.778916181 +0200 -+++ gcc-9.3.0/libgomp/testsuite/libgomp.oacc-c-c++-common/subr.ptx 2020-07-15 23:14:10.091955141 +0200 -@@ -1,6 +1,6 @@ - // BEGIN PREAMBLE - .version 3.1 -- .target sm_30 -+ .target sm_35 - .address_size 64 - // END PREAMBLE - diff --git a/Golden_Repo/g/GDAL/GDAL-3.1.2-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/g/GDAL/GDAL-3.1.2-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index dfdaaf9e6bb3d3804cd9519c532c2689508a84a6..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GDAL/GDAL-3.1.2-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,73 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GDAL' -version = '3.1.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.gdal.org/' -description = """GDAL is a translator library for raster geospatial data -formats that is released under an X/MIT style Open Source license by the Open -Source Geospatial Foundation. As a library, it presents a single abstract data -model to the calling application for all supported formats. 
It also comes with -a variety of useful commandline utilities for data translation and processing. -""" - -site_contacts = 'Alexandre Strube <a.strube@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['http://download.osgeo.org/gdal/%(version)s/'] -sources = [SOURCELOWER_TAR_XZ] - -patches = ['GDAL-3.0.0_fix-python-CC-CXX.patch'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('netCDF', '4.7.4'), - ('expat', '2.2.9'), - ('GEOS', '3.8.1', versionsuffix), - ('SQLite', '3.32.3'), - ('libxml2', '2.9.10'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('LibTIFF', '4.1.0'), - ('zlib', '1.2.11'), - ('cURL', '7.71.1'), - ('PCRE', '8.44'), - ('PROJ', '7.1.0'), - ('libgeotiff', '1.6.0'), - ('HDF5', '1.10.6'), - ('OpenJPEG', '2.3.1'), -] - -configopts = ' --with-python=$EBROOTPYTHON/bin/python' -configopts += ' --with-netcdf=$EBROOTNETCDF' -configopts += ' --with-expat=$EBROOTEXPAT' -configopts += ' --with-geos=$EBROOTGEOS/bin/geos-config' -configopts += ' --with-sqlite3=$EBROOTSQLITE ' -configopts += ' --with-xml2=yes' # it was $EBROOTLIBXML2 -configopts += ' --with-png=$EBROOTLIBPNG' -configopts += ' --with-jpeg=$EBROOTLIBJPEGMINTURBO' -configopts += ' --with-libtiff=$EBROOTLIBTIFF' -configopts += ' --with-libz=$EBROOTLIBZ' -configopts += ' --with-pcre=$EBROOTPCRE' -configopts += ' --with-proj=$EBROOTPROJ' -configopts += ' --with-libgeotiff=$EBROOTLIBGEOTIFF' -configopts += ' --without-hdf4 --with-hdf5=$EBROOTHDF5' -configopts += ' --with-openjpeg=$EBROOTOPENJPEG' -configopts += ' --without-jasper' - -prebuildopts = 'export LDSHARED="$CC -shared" && ' - -modextrapaths = {'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages'} - -sanity_check_paths = { - 'files': ['lib/libgdal.a', 'lib/libgdal.%s' % SHLIB_EXT], - 'dirs': ['bin', 'include', 'lib/python%(pyshortver)s/site-packages'] -} - -sanity_check_commands = ["python -c 'import gdal'", "python -c 'from osgeo import gdal_array'"] - -moduleclass = 'data' diff --git a/Golden_Repo/g/GDAL/GDAL-3.1.2-ipsmpi-2020-Python-3.8.5.eb b/Golden_Repo/g/GDAL/GDAL-3.1.2-ipsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index 709bde7f1023c89ecd7a6681cf3ab9d6dd1dba02..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GDAL/GDAL-3.1.2-ipsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,73 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GDAL' -version = '3.1.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.gdal.org/' -description = """GDAL is a translator library for raster geospatial data -formats that is released under an X/MIT style Open Source license by the Open -Source Geospatial Foundation. As a library, it presents a single abstract data -model to the calling application for all supported formats. It also comes with -a variety of useful commandline utilities for data translation and processing. 
-""" - -site_contacts = 'Alexandre Strube <a.strube@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['http://download.osgeo.org/gdal/%(version)s/'] -sources = [SOURCELOWER_TAR_XZ] - -patches = ['GDAL-3.0.0_fix-python-CC-CXX.patch'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('netCDF', '4.7.4'), - ('expat', '2.2.9'), - ('GEOS', '3.8.1', versionsuffix), - ('SQLite', '3.32.3'), - ('libxml2', '2.9.10'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('LibTIFF', '4.1.0'), - ('zlib', '1.2.11'), - ('cURL', '7.71.1'), - ('PCRE', '8.44'), - ('PROJ', '7.1.0'), - ('libgeotiff', '1.6.0'), - ('HDF5', '1.10.6'), - ('OpenJPEG', '2.3.1'), -] - -configopts = ' --with-python=$EBROOTPYTHON/bin/python' -configopts += ' --with-netcdf=$EBROOTNETCDF' -configopts += ' --with-expat=$EBROOTEXPAT' -configopts += ' --with-geos=$EBROOTGEOS/bin/geos-config' -configopts += ' --with-sqlite3=$EBROOTSQLITE ' -configopts += ' --with-xml2=yes' # it was $EBROOTLIBXML2 -configopts += ' --with-png=$EBROOTLIBPNG' -configopts += ' --with-jpeg=$EBROOTLIBJPEGMINTURBO' -configopts += ' --with-libtiff=$EBROOTLIBTIFF' -configopts += ' --with-libz=$EBROOTLIBZ' -configopts += ' --with-pcre=$EBROOTPCRE' -configopts += ' --with-proj=$EBROOTPROJ' -configopts += ' --with-libgeotiff=$EBROOTLIBGEOTIFF' -configopts += ' --without-hdf4 --with-hdf5=$EBROOTHDF5' -configopts += ' --with-openjpeg=$EBROOTOPENJPEG' -configopts += ' --without-jasper' - -prebuildopts = 'export LDSHARED="$CC -shared" && ' - -modextrapaths = {'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages'} - -sanity_check_paths = { - 'files': ['lib/libgdal.a', 'lib/libgdal.%s' % SHLIB_EXT], - 'dirs': ['bin', 'include', 'lib/python%(pyshortver)s/site-packages'] -} - -sanity_check_commands = ["python -c 'import gdal'", "python -c 'from osgeo import gdal_array'"] - -moduleclass = 'data' diff --git a/Golden_Repo/g/GDB/GDB-10.1-GCCcore-10.3.0.eb b/Golden_Repo/g/GDB/GDB-10.1-GCCcore-10.3.0.eb deleted file mode 100644 index 3011d310a3b9b88721d7f0cf35fee182c2b1af01..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GDB/GDB-10.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GDB' -version = '10.1' - -homepage = 'http://www.gnu.org/software/gdb/gdb.html' -description = "The GNU Project Debugger" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'sc@fz-juelich.de' - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('texinfo', '6.7'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libreadline', '8.0'), - ('ncurses', '6.2'), - ('expat', '2.2.9'), -] - -configopts = '--with-system-zlib --with-zlib=$EBROOTZLIB --with-expat=$EBROOTEXPAT ' -configopts += '--with-system-readline --enable-tui --enable-plugins' - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/gdb', 'bin/gdbserver'], - 'dirs': [], -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/g/GDB/GDB-10.1-GCCcore-9.3.0.eb b/Golden_Repo/g/GDB/GDB-10.1-GCCcore-9.3.0.eb deleted file mode 100644 index 6c99d267686837ff006f66a3f40860300fb3bc43..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GDB/GDB-10.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GDB' -version = '10.1' - -homepage = 'http://www.gnu.org/software/gdb/gdb.html' -description = "The GNU Project Debugger" - -toolchain 
= {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'sc@fz-juelich.de' - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('texinfo', '6.7'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libreadline', '8.0'), - ('ncurses', '6.2'), - ('expat', '2.2.9'), -] - -configopts = '--with-system-zlib --with-zlib=$EBROOTZLIB --with-expat=$EBROOTEXPAT ' -configopts += '--with-system-readline --enable-tui --enable-plugins' - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/gdb', 'bin/gdbserver'], - 'dirs': [], -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/g/GEOS/GEOS-3.8.1-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/g/GEOS/GEOS-3.8.1-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 1ee5d79195182e6e73bb22a2ac0e3ebb139bac01..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GEOS/GEOS-3.8.1-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GEOS' -version = '3.8.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://trac.osgeo.org/geos' -description = """GEOS (Geometry Engine - Open Source) is a C++ port of the - Java Topology Suite (JTS)""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://download.osgeo.org/geos/'] -sources = [SOURCELOWER_TAR_BZ2] - -patches = [ - 'GEOS-3.6.2_fix-Python3.patch' -] - -builddependencies = [ - ('SWIG', '4.0.2', versionsuffix), - ('binutils', '2.36.1') -] - -dependencies = [ - ('Python', '3.8.5') -] - -configopts = '--enable-python' - -modextrapaths = {'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages'} - -sanity_check_paths = { - 'files': ['bin/geos-config', 'lib/libgeos.%s' % SHLIB_EXT, 'lib/libgeos.a', - 'include/geos.h'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/geos'], -} - -moduleclass = 'math' diff --git a/Golden_Repo/g/GEOS/GEOS-3.8.1-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/g/GEOS/GEOS-3.8.1-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index dca005611cdd98ea9cedac42e50796fb672cbe80..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GEOS/GEOS-3.8.1-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GEOS' -version = '3.8.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://trac.osgeo.org/geos' -description = """GEOS (Geometry Engine - Open Source) is a C++ port of the - Java Topology Suite (JTS)""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://download.osgeo.org/geos/'] -sources = [SOURCELOWER_TAR_BZ2] - -patches = [ - 'GEOS-3.6.2_fix-Python3.patch' -] - -builddependencies = [ - ('SWIG', '4.0.2', versionsuffix), - ('binutils', '2.34') -] - -dependencies = [ - ('Python', '3.8.5') -] - -configopts = '--enable-python' - -modextrapaths = {'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages'} - -sanity_check_paths = { - 'files': ['bin/geos-config', 'lib/libgeos.%s' % SHLIB_EXT, 'lib/libgeos.a', - 'include/geos.h'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/geos'], -} - -moduleclass = 'math' diff --git a/Golden_Repo/g/GL2PS/GL2PS-1.4.2-GCCcore-10.3.0.eb b/Golden_Repo/g/GL2PS/GL2PS-1.4.2-GCCcore-10.3.0.eb deleted file mode 100644 index 0e3aa4815302f2c16d5e93078f3931ceb3c15065..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GL2PS/GL2PS-1.4.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'CMakeMake' - -name = 
'GL2PS' -version = '1.4.2' - -homepage = 'http://www.geuz.org/gl2ps/' -description = """GL2PS: an OpenGL to PostScript printing library -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://geuz.org/gl2ps/src/'] -sources = [SOURCELOWER_TGZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('texinfo', '6.7'), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ['include/gl2ps.h', 'lib/libgl2ps.so'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GL2PS/GL2PS-1.4.2-GCCcore-9.3.0.eb b/Golden_Repo/g/GL2PS/GL2PS-1.4.2-GCCcore-9.3.0.eb deleted file mode 100644 index 1f453c453d90f32cb550e97257595ac5af3c3e3f..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GL2PS/GL2PS-1.4.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'GL2PS' -version = '1.4.2' - -homepage = 'http://www.geuz.org/gl2ps/' -description = """GL2PS: an OpenGL to PostScript printing library -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://geuz.org/gl2ps/src/'] -sources = [SOURCELOWER_TGZ] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('texinfo', '6.7'), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ['include/gl2ps.h', 'lib/libgl2ps.so'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GLM/GLM-0.9.9.8-GCCcore-10.3.0.eb b/Golden_Repo/g/GLM/GLM-0.9.9.8-GCCcore-10.3.0.eb deleted file mode 100644 index da486023e1a6994a526de9cd232b445a67fce88d..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GLM/GLM-0.9.9.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'GLM' -version = '0.9.9.8' - -homepage = 'https://github.com/g-truc/glm' -description = """OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on - the OpenGL Shading Language (GLSL) specifications.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/g-truc/glm/archive/'] -sources = ['%(version)s.tar.gz'] - -patches = [ - 'GLM-0.9.9.8_fix_missing_install.patch', -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['include/glm/', 'include/glm/gtc', 'include/glm/gtx'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/g/GLM/GLM-0.9.9.8-GCCcore-9.3.0.eb b/Golden_Repo/g/GLM/GLM-0.9.9.8-GCCcore-9.3.0.eb deleted file mode 100644 index 318b505392627487c61bdf0b01810627947d0920..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GLM/GLM-0.9.9.8-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'GLM' -version = '0.9.9.8' - -homepage = 'https://github.com/g-truc/glm' -description = """OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on - the OpenGL Shading Language (GLSL) specifications.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/g-truc/glm/archive/'] -sources = ['%(version)s.tar.gz'] - 
-patches = [ - 'GLM-0.9.9.8_fix_missing_install.patch', -] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['include/glm/', 'include/glm/gtc', 'include/glm/gtx'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/g/GLM/GLM-0.9.9.8_fix_missing_install.patch b/Golden_Repo/g/GLM/GLM-0.9.9.8_fix_missing_install.patch deleted file mode 100644 index cc9f8fcba8587316e67a56d71d1bd21f8405ca80..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GLM/GLM-0.9.9.8_fix_missing_install.patch +++ /dev/null @@ -1,15 +0,0 @@ -Restore installation functionality which was removed in v0.9.9.6 - -Åke Sandgren, 20200519 -diff -ru glm-0.9.9.8.orig/CMakeLists.txt glm-0.9.9.8/CMakeLists.txt ---- glm-0.9.9.8.orig/CMakeLists.txt 2020-04-13 19:41:16.000000000 +0200 -+++ glm-0.9.9.8/CMakeLists.txt 2020-05-19 16:16:12.731259305 +0200 -@@ -8,6 +8,8 @@ - add_subdirectory(glm) - add_library(glm::glm ALIAS glm) - -+install(DIRECTORY glm DESTINATION "include") -+ - if(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_CURRENT_SOURCE_DIR}) - - add_subdirectory(test) diff --git a/Golden_Repo/g/GLPK/GLPK-4.65-GCCcore-10.3.0.eb b/Golden_Repo/g/GLPK/GLPK-4.65-GCCcore-10.3.0.eb deleted file mode 100644 index f1d8e9b022e00c255bc7631e14ec0f971272a8c5..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GLPK/GLPK-4.65-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GLPK' -version = '4.65' - -homepage = 'https://www.gnu.org/software/glpk/' -description = """The GLPK (GNU Linear Programming Kit) package is intended for solving large-scale linear programming -(LP), mixed integer programming (MIP), and other related problems. It is a set of routines written in ANSI C and -organized in the form of a callable library. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://ftp.gnu.org/gnu/%(namelower)s/'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('GMP', '6.2.0') -] - -configopts = "--with-gmp" - -sanity_check_paths = { - 'files': ['bin/glpsol', 'include/glpk.h'] + - ['lib/libglpk.%s' % x for x in [SHLIB_EXT, 'a']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/g/GLPK/GLPK-4.65-GCCcore-9.3.0.eb b/Golden_Repo/g/GLPK/GLPK-4.65-GCCcore-9.3.0.eb deleted file mode 100644 index c2dbb0620a23e40b83578e740c329a988118c3ce..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GLPK/GLPK-4.65-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GLPK' -version = '4.65' - -homepage = 'https://www.gnu.org/software/glpk/' -description = """The GLPK (GNU Linear Programming Kit) package is intended for solving large-scale linear programming -(LP), mixed integer programming (MIP), and other related problems. It is a set of routines written in ANSI C and -organized in the form of a callable library. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://ftp.gnu.org/gnu/%(namelower)s/'] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('GMP', '6.2.0') -] - -configopts = "--with-gmp" - -sanity_check_paths = { - 'files': ['bin/glpsol', 'include/glpk.h'] + - ['lib/libglpk.%s' % x for x in [SHLIB_EXT, 'a']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/g/GLib/GLib-2.64.4-GCCcore-10.3.0.eb b/Golden_Repo/g/GLib/GLib-2.64.4-GCCcore-10.3.0.eb deleted file mode 100644 index d280bcc50a19993fc35466737d8fd607f5291b0b..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GLib/GLib-2.64.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'GLib' -version = '2.64.4' - -homepage = 'http://www.gtk.org/' -description = "GLib is one of the base libraries of the GTK+ project" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -local_pyver = '3.8.5' - -builddependencies = [ - ('binutils', '2.36.1'), - ('Python', local_pyver), - ('Meson', '0.55.0', '-Python-%s' % '3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2') -] - -dependencies = [ - ('libffi', '3.3'), - ('gettext', '0.20.2'), - ('libxml2', '2.9.10'), - ('PCRE', '8.44'), - ('util-linux', '2.36') -] - -configopts = '-Ddefault_library=both' - -postinstallcmds = ["sed -i -e 's|#!.*python|#!/usr/bin/env python|' %(installdir)s/bin/*"] - -modextrapaths = { - 'CPATH': ['include/gio-unix-2.0', 'include/glib-2.0'], -} - -sanity_check_paths = { - 'files': ['lib64/libglib-%(version_major)s.0.a', 'lib64/libglib-%%(version_major)s.0.%s' % SHLIB_EXT], - 'dirs': ['bin', 'include'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GLib/GLib-2.64.4-GCCcore-9.3.0.eb b/Golden_Repo/g/GLib/GLib-2.64.4-GCCcore-9.3.0.eb deleted file mode 100644 index 44f1f5972a075afc66dcd34a84ff76d8121b66c0..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GLib/GLib-2.64.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'GLib' -version = '2.64.4' - -homepage = 'http://www.gtk.org/' -description = "GLib is one of the base libraries of the GTK+ project" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -local_pyver = '3.8.5' - -builddependencies = [ - ('binutils', '2.34'), - ('Python', local_pyver), - ('Meson', '0.55.0', '-Python-%s' % '3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2') -] - -dependencies = [ - ('libffi', '3.3'), - ('gettext', '0.20.2'), - ('libxml2', '2.9.10'), - ('PCRE', '8.44'), - ('util-linux', '2.36') -] - -configopts = '-Ddefault_library=both' - -postinstallcmds = ["sed -i -e 's|#!.*python|#!/usr/bin/env python|' %(installdir)s/bin/*"] - -modextrapaths = { - 'CPATH': ['include/gio-unix-2.0', 'include/glib-2.0'], -} - -sanity_check_paths = { - 'files': ['lib64/libglib-%(version_major)s.0.a', 'lib64/libglib-%%(version_major)s.0.%s' % SHLIB_EXT], - 'dirs': ['bin', 'include'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GMP/GMP-6.2.0-GCCcore-10.3.0.eb b/Golden_Repo/g/GMP/GMP-6.2.0-GCCcore-10.3.0.eb deleted file mode 100644 index 942c2bc4bc19406bc35745f09eadc6eddf353955..0000000000000000000000000000000000000000 
--- a/Golden_Repo/g/GMP/GMP-6.2.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GMP' -version = '6.2.0' - -homepage = 'http://gmplib.org/' -description = """GMP is a free library for arbitrary precision arithmetic, -operating on signed integers, rational numbers, and floating point numbers. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True, 'precise': True} - -sources = [SOURCELOWER_TAR_BZ2] -source_urls = ['http://ftp.gnu.org/gnu/gmp'] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.36.1'), -] - -# enable C++ interface -configopts = '--enable-cxx' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libgmp.%s' % SHLIB_EXT, 'include/gmp.h'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/g/GMP/GMP-6.2.0-GCCcore-9.3.0.eb b/Golden_Repo/g/GMP/GMP-6.2.0-GCCcore-9.3.0.eb deleted file mode 100644 index 2eb6cb21ff63f1e3f42474de7ade9736c723eb4f..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GMP/GMP-6.2.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GMP' -version = '6.2.0' - -homepage = 'http://gmplib.org/' -description = """GMP is a free library for arbitrary precision arithmetic, -operating on signed integers, rational numbers, and floating point numbers. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True, 'precise': True} - -sources = [SOURCELOWER_TAR_BZ2] -source_urls = ['http://ftp.gnu.org/gnu/gmp'] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.34'), -] - -# enable C++ interface -configopts = '--enable-cxx' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libgmp.%s' % SHLIB_EXT, 'include/gmp.h'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/g/GObject-Introspection/GObject-Introspection-1.64.1-GCCcore-10.3.0.eb b/Golden_Repo/g/GObject-Introspection/GObject-Introspection-1.64.1-GCCcore-10.3.0.eb deleted file mode 100644 index b72ac3bf612e5b7dae914e2d05885ee12b395034..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GObject-Introspection/GObject-Introspection-1.64.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'GObject-Introspection' -version = '1.64.1' - -homepage = 'https://gi.readthedocs.io/en/latest/' -description = """GObject introspection is a middleware layer between C libraries - (using GObject) and language bindings. The C library can be scanned at - compile time and generate a metadata file, in addition to the actual - native C library. Then at runtime, language bindings can read this - metadata and automatically provide bindings to call into the C library. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2') -] - -dependencies = [ - ('GLib', '2.64.4'), - ('flex', '2.6.4'), - ('Bison', '3.7.6'), - ('cairo', '1.17.2'), - ('libffi', '3.3'), - ('Python', '3.8.5'), -] - -preconfigopts = "GI_SCANNER_DISABLE_CACHE=true " - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['bin/g-ir-%s' % x for x in ['annotation-tool', 'compiler', 'generate', 'scanner']] + - ['lib/libgirepository-1.0.' + SHLIB_EXT], - 'dirs': ['include', 'share'] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/GObject-Introspection/GObject-Introspection-1.64.1-GCCcore-9.3.0.eb b/Golden_Repo/g/GObject-Introspection/GObject-Introspection-1.64.1-GCCcore-9.3.0.eb deleted file mode 100644 index 8a4dab2d4ee4cf72240916f8edb654dee5ef40d3..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GObject-Introspection/GObject-Introspection-1.64.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'GObject-Introspection' -version = '1.64.1' - -homepage = 'https://gi.readthedocs.io/en/latest/' -description = """GObject introspection is a middleware layer between C libraries - (using GObject) and language bindings. The C library can be scanned at - compile time and generate a metadata file, in addition to the actual - native C library. Then at runtime, language bindings can read this - metadata and automatically provide bindings to call into the C library. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2') -] - -dependencies = [ - ('GLib', '2.64.4'), - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('cairo', '1.17.2'), - ('libffi', '3.3'), - ('Python', '3.8.5'), -] - -preconfigopts = "GI_SCANNER_DISABLE_CACHE=true " - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['bin/g-ir-%s' % x for x in ['annotation-tool', 'compiler', 'generate', 'scanner']] + - ['lib/libgirepository-1.0.' + SHLIB_EXT], - 'dirs': ['include', 'share'] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/GPAW-setups/GPAW-setups-0.9.20000.eb b/Golden_Repo/g/GPAW-setups/GPAW-setups-0.9.20000.eb deleted file mode 100644 index 2eddd4a4143883563b3e5d4e4a2fd09b1c8f2092..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GPAW-setups/GPAW-setups-0.9.20000.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'Tarball' - -name = 'GPAW-setups' -version = '0.9.20000' - -homepage = 'https://wiki.fysik.dtu.dk/gpaw/' -description = """PAW setup for the GPAW Density Functional Theory package. -Users can install setups manually using 'gpaw install-data' or use setups from this package. 
-The versions of GPAW and GPAW-setups can be intermixed.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM -source_urls = ['https://wiki.fysik.dtu.dk/gpaw-files/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6c71682be12a41e17909f65fd7c1a2e4a6a7becb63fbeed2f0f3a1616d6fd41f'] - -modextrapaths = {'GPAW_SETUP_PATH': ''} - -moduleclass = 'chem' - -sanity_check_paths = { - 'files': ['H.LDA.gz'], - 'dirs': [] -} diff --git a/Golden_Repo/g/GPAW/GPAW-20.1.0-Add-Easybuild-configuration-files.patch b/Golden_Repo/g/GPAW/GPAW-20.1.0-Add-Easybuild-configuration-files.patch deleted file mode 100644 index 551d7ead70d4929c24fc8d702311b44d35844dc5..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GPAW/GPAW-20.1.0-Add-Easybuild-configuration-files.patch +++ /dev/null @@ -1,126 +0,0 @@ -From 32af009dde2d69e840e09994dfc955480d9a251f Mon Sep 17 00:00:00 2001 -From: Jakob Schiotz <schiotz@fysik.dtu.dk> -Date: Wed, 5 Feb 2020 15:08:15 +0100 -Subject: [PATCH] Add Easybuild configuration files. - -These files are made for the foss/2019b and intel/2019b toolchains, -but are expected to work for future toolchains, too. The -config-foss.py files will probably also work for other toolchains than -foss and intel, as long as it does not use Intel Math Kernel Library. - -The special config file is necessary for the Intel toolchain as the -EasyBuild system reports FFT libraries that are build without -fPIC, -but we need to link to versions compiled *with* -fPIC when building -shared objects. ---- - doc/platforms/Linux/EasyBuild/config_foss.py | 49 +++++++++++++++++++++++++++ - doc/platforms/Linux/EasyBuild/config_intel.py | 37 ++++++++++++++++++++ - 2 files changed, 86 insertions(+) - create mode 100644 doc/platforms/Linux/EasyBuild/config_foss.py - create mode 100644 doc/platforms/Linux/EasyBuild/config_intel.py - -diff --git a/doc/platforms/Linux/EasyBuild/config_foss.py b/doc/platforms/Linux/EasyBuild/config_foss.py -new file mode 100644 -index 0000000..5be60fa ---- /dev/null -+++ b/doc/platforms/Linux/EasyBuild/config_foss.py -@@ -0,0 +1,53 @@ -+# Convert static library specs from EasyBuild to GPAW -+def static_eblibs_to_gpawlibs(lib_specs): -+ return [libfile[3:-2] for libfile in os.getenv(lib_specs).split(',')] -+ -+# Clean out any autodetected things, we only want the EasyBuild -+# definitions to be used. 
-+libraries = [] -+include_dirs = [] -+ -+# Use EasyBuild fftw from the active toolchain -+fftw = os.getenv('FFT_STATIC_LIBS') -+if fftw: -+ libraries += static_eblibs_to_gpawlibs('FFT_STATIC_LIBS') -+ -+# Use ScaLAPACK from the active toolchain -+scalapack = os.getenv('SCALAPACK_STATIC_LIBS') -+if scalapack: -+ libraries += static_eblibs_to_gpawlibs('SCALAPACK_STATIC_LIBS') -+ -+# Add EasyBuild LAPACK/BLAS libs -+lapack = os.getenv('LAPACK_STATIC_LIBS') -+if lapack: -+ libraries += static_eblibs_to_gpawlibs('LAPACK_STATIC_LIBS') -+blas = os.getenv('BLAS_STATIC_LIBS') -+if blas: -+ libraries += static_eblibs_to_gpawlibs('BLAS_STATIC_LIBS') -+ -+# LibXC: -+# Use EasyBuild libxc -+libxc = os.getenv('EBROOTLIBXC') -+if libxc: -+ include_dirs.append(os.path.join(libxc, 'include')) -+ libraries.append('xc') -+ -+# libvdwxc: -+# Use EasyBuild libvdwxc -+libvdwxc = os.getenv('EBROOTLIBVDWXC') -+if libvdwxc: -+ include_dirs.append(os.path.join(libvdwxc, 'include')) -+ libraries.append('vdwxc') -+ -+# ELPA: -+# Use EasyBuild ELPA if loaded -+elpa = os.getenv('EBROOTELPA') -+if elpa: -+ libraries += ['elpa'] -+ elpaversion = os.path.basename(elpa).split('-')[0] -+ library_dirs.append(os.path.join(elpa, 'lib')) -+ extra_link_args += ['-Wl,-rpath={}/lib'.format(elpa)] -+ include_dirs.append(os.path.join(elpa, 'include', 'elpa-'+elpaversion)) -+ -+# Now add a EasyBuild "cover-all-bases" library_dirs -+library_dirs = os.getenv('LD_LIBRARY_PATH').split(':') -diff --git a/doc/platforms/Linux/EasyBuild/config_intel.py b/doc/platforms/Linux/EasyBuild/config_intel.py -new file mode 100644 -index 0000000..4038363 ---- /dev/null -+++ b/doc/platforms/Linux/EasyBuild/config_intel.py -@@ -0,0 +1,37 @@ -+mpicompiler = 'mpiicc' -+ -+# FFTW should be configured from environment variables, but they do -+# not report the correct names for a dynamically loaded library. 
-+fftw = True -+# Use Intel MKL -+libraries += ['mkl_sequential','mkl_core', 'fftw3xc_intel_pic', 'mkl_rt', ] -+ -+# Use EasyBuild scalapack from the active toolchain -+scalapack = True -+libraries += ['mkl_scalapack_lp64', 'mkl_blacs_intelmpi_lp64'] -+ -+# Use EasyBuild libxc -+libxc = os.getenv('EBROOTLIBXC') -+include_dirs.append(os.path.join(libxc, 'include')) -+ -+# libvdwxc: -+# Use EasyBuild libvdwxc -+# NOTE: This currenlty does not work together with the Intel MKL, so -+# the easyconfig files does not load libvdwxc -+libvdwxc = os.getenv('EBROOTLIBVDWXC') -+if libvdwxc: -+ include_dirs.append(os.path.join(libvdwxc, 'include')) -+ libraries.append('vdwxc') -+ -+# ELPA: -+# Use EasyBuild ELPA if loaded -+elpa = os.getenv('EBROOTELPA') -+if elpa: -+ libraries += ['elpa'] -+ elpaversion = os.path.basename(elpa).split('-')[0] -+ library_dirs.append(os.path.join(elpa, 'lib')) -+ extra_link_args += ['-Wl,-rpath={}/lib'.format(elpa)] -+ include_dirs.append(os.path.join(elpa, 'include', 'elpa-'+elpaversion)) -+ -+# Now add a EasyBuild "cover-all-bases" library_dirs -+library_dirs = os.getenv('LD_LIBRARY_PATH').split(':') --- -1.8.3.1 - diff --git a/Golden_Repo/g/GPAW/GPAW-20.1.0-Wrap-pragma-omp-simd-in-ifdef-_OPENMP-blocks.patch b/Golden_Repo/g/GPAW/GPAW-20.1.0-Wrap-pragma-omp-simd-in-ifdef-_OPENMP-blocks.patch deleted file mode 100644 index be5db000ac5838788defaac5a8f0b5967ca63fea..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GPAW/GPAW-20.1.0-Wrap-pragma-omp-simd-in-ifdef-_OPENMP-blocks.patch +++ /dev/null @@ -1,66 +0,0 @@ -From 8bbfb3a6aae83e8de3c5c4bcbcd473b0a2a77852 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Jens=20J=C3=B8rgen=20Mortensen?= <jensj@fysik.dtu.dk> -Date: Mon, 3 Feb 2020 13:03:37 +0100 -Subject: [PATCH] Wrap #pragma omp simd in #ifdef _OPENMP blocks - ---- - c/bmgs/fd.c | 2 ++ - c/bmgs/relax.c | 4 ++++ - c/symmetry.c | 2 ++ - 3 files changed, 8 insertions(+) - -diff --git a/c/bmgs/fd.c b/c/bmgs/fd.c -index c05e425..e75eb6a 100644 ---- a/c/bmgs/fd.c -+++ b/c/bmgs/fd.c -@@ -36,7 +36,9 @@ void *Z(bmgs_fd_worker)(void *threadarg) - - for (int i1 = 0; i1 < s->n[1]; i1++) - { -+#ifdef _OPENMP - #pragma omp simd -+#endif - for (int i2 = 0; i2 < s->n[2]; i2++) - { - T x = 0.0; -diff --git a/c/bmgs/relax.c b/c/bmgs/relax.c -index d0be905..6c95bf3 100644 ---- a/c/bmgs/relax.c -+++ b/c/bmgs/relax.c -@@ -25,7 +25,9 @@ if (relax_method == 1) - { - for (int i1 = 0; i1 < nstep[1]; i1++) - { -+#ifdef _OPENMP - #pragma omp simd -+#endif - for (int i2 = 0; i2 < nstep[2]; i2++) - { - double x = 0.0; -@@ -53,7 +55,9 @@ else - { - for (int i1 = 0; i1 < s->n[1]; i1++) - { -+#ifdef _OPENMP - #pragma omp simd -+#endif - for (int i2 = 0; i2 < s->n[2]; i2++) - { - double x = 0.0; -diff --git a/c/symmetry.c b/c/symmetry.c -index 207e82b..7db4bf2 100644 ---- a/c/symmetry.c -+++ b/c/symmetry.c -@@ -36,7 +36,9 @@ PyObject* symmetrize(PyObject *self, PyObject *args) - - const double* a_g = (const double*)PyArray_DATA(a_g_obj); - double* b_g = (double*)PyArray_DATA(b_g_obj); -+#ifdef _OPENMP - #pragma omp simd -+#endif - for (int g0 = o_c[0]; g0 < Ng0; g0++) - for (int g1 = o_c[1]; g1 < Ng1; g1++) - for (int g2 = o_c[2]; g2 < Ng2; g2++) { --- -1.8.3.1 - diff --git a/Golden_Repo/g/GPAW/GPAW-20.1.0-gpsmkl-2020-Python-3.8.5.eb b/Golden_Repo/g/GPAW/GPAW-20.1.0-gpsmkl-2020-Python-3.8.5.eb deleted file mode 100644 index 0801b3b3e9789ea0f80f72f017509934c18b24fb..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GPAW/GPAW-20.1.0-gpsmkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,54 +0,0 @@ 
-easyblock = "PythonPackage" - -name = 'GPAW' -version = '20.1.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://wiki.fysik.dtu.dk/gpaw/' -description = """GPAW is a density-functional theory (DFT) Python code based on the projector-augmented wave (PAW) - method and the atomic simulation environment (ASE). It uses real-space uniform grids and multigrid methods or - atom-centered basis-functions. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': False} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -patches = [ - ('GPAW-20.1.0-Add-Easybuild-configuration-files.patch', 1), - ('GPAW-20.1.0-Wrap-pragma-omp-simd-in-ifdef-_OPENMP-blocks.patch', 1), -] - -dependencies = [ - ('Python', '3.8.5'), - ('libreadline', '8.0'), - ('ASE', '3.19.2', versionsuffix), - ('libxc', '4.3.4'), - ('HDF5', '1.10.6'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('libvdwxc', '0.4.0'), - ('GPAW-setups', '0.9.20000', '', SYSTEM), -] - -preconfigopts = 'export MKL_ROOT=$MKLROOT && ' - -prebuildopts = 'export LDSHARED="$CC -shared" && ' -prebuildopts += 'GPAW_CONFIG=doc/platforms/Linux/EasyBuild/config_foss.py' - -preinstallopts = prebuildopts - -sanity_check_paths = { - 'files': ['bin/gpaw%s' % x for x in ['', '-analyse-basis', '-basis', '-mpisim', '-plot-parallel-timings', - '-runscript', '-setup', '-upfplot']], - 'dirs': ['lib/python%(pyshortver)s/site-packages'] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/g/GPAW/gpaw_1.5.2_mkl_irc.patch b/Golden_Repo/g/GPAW/gpaw_1.5.2_mkl_irc.patch deleted file mode 100644 index 65dfb9df4e1ea7df3a13b11c19de0b840047a96c..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GPAW/gpaw_1.5.2_mkl_irc.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff -ruN gpaw-1.5.2.orig/config.py gpaw-1.5.2/config.py ---- gpaw-1.5.2.orig/config.py 2019-05-08 08:31:09.000000000 +0200 -+++ gpaw-1.5.2/config.py 2019-05-10 16:23:17.330520035 +0200 -@@ -154,8 +154,7 @@ - libdir = dir - break - if 'MKLROOT' in os.environ: -- libraries += ['mkl_intel_lp64', 'mkl_sequential', 'mkl_core', -- 'irc'] -+ libraries += ['mkl_intel_lp64', 'mkl_sequential', 'mkl_core', 'readline'] - elif openblas: # prefer openblas - libraries += ['openblas', 'lapack'] - library_dirs += [libdir] diff --git a/Golden_Repo/g/GPicView/GPicView-0.2.5-GCCcore-10.3.0.eb b/Golden_Repo/g/GPicView/GPicView-0.2.5-GCCcore-10.3.0.eb deleted file mode 100644 index 6772aaa2d7d73b047cac18e7d5e88064015d14db..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GPicView/GPicView-0.2.5-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GPicView' -version = '0.2.5' - -homepage = 'http://lxde.sourceforge.net/gpicview' -description = """GPicView - A Simple and Fast Image Viewer for X""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://sourceforge.net/projects/lxde/files/GPicView%20%28image%20Viewer%29/0.2.x/'] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('X11', '20200222'), - ('GTK+', '3.24.17'), - ('libjpeg-turbo', '2.0.5'), -] - -configopts = '--enable-gtk3 ' - -sanity_check_paths = { - 'files': ['bin/gpicview'], - 'dirs': [], -} - -moduleclass = 'vis' diff --git 
a/Golden_Repo/g/GPicView/GPicView-0.2.5-GCCcore-9.3.0.eb b/Golden_Repo/g/GPicView/GPicView-0.2.5-GCCcore-9.3.0.eb deleted file mode 100644 index 7fd912d81a1d1980fd3f8750cc0b8159212c5bf5..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GPicView/GPicView-0.2.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GPicView' -version = '0.2.5' - -homepage = 'http://lxde.sourceforge.net/gpicview' -description = """GPicView - A Simple and Fast Image Viewer for X""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://sourceforge.net/projects/lxde/files/GPicView%20%28image%20Viewer%29/0.2.x/'] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('X11', '20200222'), - ('GTK+', '3.24.17'), - ('libjpeg-turbo', '2.0.5'), -] - -configopts = '--enable-gtk3 ' - -sanity_check_paths = { - 'files': ['bin/gpicview'], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GROMACS/GROMACS-2020.4-gpsmkl-2020.eb b/Golden_Repo/g/GROMACS/GROMACS-2020.4-gpsmkl-2020.eb deleted file mode 100644 index 3e66180645f0ffc436de682571f484d7f0f54e7b..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GROMACS/GROMACS-2020.4-gpsmkl-2020.eb +++ /dev/null @@ -1,69 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2012-2016 University of Luxembourg / LCSB, Cyprus Institute / CaSToRC, -# Ghent University / The Francis Crick Institute -# Authors:: -# * Wiktor Jurkowski <wiktor.jurkowski@gmail.com> -# * Fotis Georgatos <fotis@cern.ch> -# * George Tsouloupas <g.tsouloupas@cyi.ac.cy> -# * Kenneth Hoste <kenneth.hoste@ugent.be> -# * Adam Huffman <adam.huffman@crick.ac.uk> -# License:: MIT/GPL -## - -name = 'GROMACS' -version = '2020.4' - -homepage = 'http://www.gromacs.org' -description = """ -GROMACS is a versatile package to perform molecular dynamics, i.e. simulate the Newtonian equations -of motion for systems with hundreds to millions of particles. It is primarily designed for -biochemical molecules like proteins and lipids that have a lot of complicated bonded interactions, -but since GROMACS is extremely fast at calculating the non-bonded interactions (that usually -dominate simulations) many groups are also using it for research on non-biological systems, e.g. -polymers. - -Documentation -============= -Use `gmx` to execute GROMACS commands on a single node, for example, to prepare your run. Use -`gmx_mpi mdrun` in your job scripts with `srun`. The user documentation of GROMACS can be found at -http://manual.gromacs.org/documentation/current/user-guide/index.html. - -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = ['ftp://ftp.gromacs.org/pub/gromacs/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), - ('libxml2', '2.9.10') -] - - -# Removed ('hwloc', '2.0.3') from dependencies due to runtime failures. 
-dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '-DCMAKE_PREFIX_PATH=$EBROOTHWLOC -DMPIEXEC_MAX_NUMPROCS="24" ' -configopts += '-DMKL_LIBRARIES="${MKLROOT}/lib/intel64/libmkl_intel_ilp64.so;' -configopts += '${MKLROOT}/lib/intel64/libmkl_sequential.so;${MKLROOT}/lib/intel64/libmkl_core.so" ' -configopts += '-DGMX_CUDA_TARGET_SM="60;70;80" ' - -mpiexec = 'srun' -mpiexec_numproc_flag = '"--gres=gpu:1 -n"' -mpi_numprocs = '24' - -runtest = False - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'bio' diff --git a/Golden_Repo/g/GROMACS/GROMACS-2020.4-intel-para-2020.eb b/Golden_Repo/g/GROMACS/GROMACS-2020.4-intel-para-2020.eb deleted file mode 100644 index f8f204c652c3acc104ad549907e3fc8ce50f8abd..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GROMACS/GROMACS-2020.4-intel-para-2020.eb +++ /dev/null @@ -1,66 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2012-2016 University of Luxembourg / LCSB, Cyprus Institute / CaSToRC, -# Ghent University / The Francis Crick Institute -# Authors:: -# * Wiktor Jurkowski <wiktor.jurkowski@gmail.com> -# * Fotis Georgatos <fotis@cern.ch> -# * George Tsouloupas <g.tsouloupas@cyi.ac.cy> -# * Kenneth Hoste <kenneth.hoste@ugent.be> -# * Adam Huffman <adam.huffman@crick.ac.uk> -# License:: MIT/GPL -## - -name = 'GROMACS' -version = '2020.4' - -homepage = 'http://www.gromacs.org' -description = """ -GROMACS is a versatile package to perform molecular dynamics, i.e. simulate the Newtonian equations -of motion for systems with hundreds to millions of particles. It is primarily designed for -biochemical molecules like proteins and lipids that have a lot of complicated bonded interactions, -but since GROMACS is extremely fast at calculating the non-bonded interactions (that usually -dominate simulations) many groups are also using it for research on non-biological systems, e.g. -polymers. - -Documentation -============= -Use `gmx` to execute GROMACS commands on a single node, for example, to prepare your run. Use -`gmx_mpi mdrun` in your job scripts with `srun`. The user documentation of GROMACS can be found at -http://manual.gromacs.org/documentation/current/user-guide/index.html. - -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = ['ftp://ftp.gromacs.org/pub/gromacs/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), - ('libxml2', '2.9.10') -] - -# Removed ('hwloc', '2.0.3') from dependencies due to runtime failures. 
-dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '-DCMAKE_PREFIX_PATH=$EBROOTHWLOC -DMPIEXEC_MAX_NUMPROCS="24"' -configopts += '-DGMX_CUDA_TARGET_SM="60;70;80" ' - -mpiexec = 'srun' -mpiexec_numproc_flag = '"--gres=gpu:1 -n"' -mpi_numprocs = '24' - -runtest = False - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'bio' diff --git a/Golden_Repo/g/GROMACS/GROMACS-2020.4-plumed-gpsmkl-2020.eb b/Golden_Repo/g/GROMACS/GROMACS-2020.4-plumed-gpsmkl-2020.eb deleted file mode 100644 index 8cdf15c34e5df6bafcff5b49cdcca2ea357e53fb..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GROMACS/GROMACS-2020.4-plumed-gpsmkl-2020.eb +++ /dev/null @@ -1,70 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2012-2016 University of Luxembourg / LCSB, Cyprus Institute / CaSToRC, -# Ghent University / The Francis Crick Institute -# Authors:: -# * Wiktor Jurkowski <wiktor.jurkowski@gmail.com> -# * Fotis Georgatos <fotis@cern.ch> -# * George Tsouloupas <g.tsouloupas@cyi.ac.cy> -# * Kenneth Hoste <kenneth.hoste@ugent.be> -# * Adam Huffman <adam.huffman@crick.ac.uk> -# License:: MIT/GPL -## - -name = 'GROMACS' -version = '2020.4' -versionsuffix = '-plumed' - -homepage = 'http://www.gromacs.org' -description = """ -GROMACS is a versatile package to perform molecular dynamics, i.e. simulate the Newtonian equations -of motion for systems with hundreds to millions of particles. It is primarily designed for -biochemical molecules like proteins and lipids that have a lot of complicated bonded interactions, -but since GROMACS is extremely fast at calculating the non-bonded interactions (that usually -dominate simulations) many groups are also using it for research on non-biological systems, e.g. -polymers. - -Documentation -============= -Use `gmx` to execute GROMACS commands on a single node, for example, to prepare your run. Use -`gmx_mpi mdrun` in your job scripts with `srun`. The user documentation of GROMACS can be found at -http://manual.gromacs.org/documentation/current/user-guide/index.html. - -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = ['ftp://ftp.gromacs.org/pub/gromacs/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), - ('libxml2', '2.9.10') -] - - -dependencies = [ - ('PLUMED', '2.7.0'), - ('CUDA', '11.0', '', True), -] - -configopts = '-DCMAKE_PREFIX_PATH=$EBROOTHWLOC -DMPIEXEC_MAX_NUMPROCS="24" ' -configopts += '-DMKL_LIBRARIES="${MKLROOT}/lib/intel64/libmkl_intel_ilp64.so;' -configopts += '${MKLROOT}/lib/intel64/libmkl_sequential.so;${MKLROOT}/lib/intel64/libmkl_core.so" ' -configopts += '-DGMX_CUDA_TARGET_SM="60;70;80" ' - -mpiexec = 'srun' -mpiexec_numproc_flag = '"--gres=gpu:1 -n"' -mpi_numprocs = '24' - -runtest = False - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'bio' diff --git a/Golden_Repo/g/GSL/GSL-2.6-GCC-9.3.0.eb b/Golden_Repo/g/GSL/GSL-2.6-GCC-9.3.0.eb deleted file mode 100644 index 5cb447b30528a0e1ce4a5a880c9d8d77226b5c9f..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GSL/GSL-2.6-GCC-9.3.0.eb +++ /dev/null @@ -1,22 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GSL' -version = '2.6' - -homepage = 'http://www.gnu.org/software/gsl/' -description = """The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. 
- The library provides a wide range of mathematical routines such as random number generators, special functions - and least-squares fitting. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'opt': True, 'optarch': True, 'unroll': True, 'pic': True} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic" - -moduleclass = 'numlib' diff --git a/Golden_Repo/g/GSL/GSL-2.6-GCCcore-10.3.0.eb b/Golden_Repo/g/GSL/GSL-2.6-GCCcore-10.3.0.eb deleted file mode 100644 index d41d49a5da7c2e9a7ba9cdcb597efb01c2c18c9b..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GSL/GSL-2.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GSL' -version = '2.6' - -homepage = 'http://www.gnu.org/software/gsl/' -description = """The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. - The library provides a wide range of mathematical routines such as random number generators, special functions - and least-squares fitting. -""" - -site_contacts = 'a.strube@fz-juelich.de' -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'opt': True, 'optarch': True, 'unroll': True, 'pic': True} - -builddependencies = [('binutils', '2.36.1')] - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic" - -moduleclass = 'numlib' diff --git a/Golden_Repo/g/GSL/GSL-2.6-GCCcore-9.3.0.eb b/Golden_Repo/g/GSL/GSL-2.6-GCCcore-9.3.0.eb deleted file mode 100644 index fd66530a9250d41f6b71b93581b355da88ee294c..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GSL/GSL-2.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GSL' -version = '2.6' - -homepage = 'http://www.gnu.org/software/gsl/' -description = """The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. - The library provides a wide range of mathematical routines such as random number generators, special functions - and least-squares fitting. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'opt': True, 'optarch': True, 'unroll': True, 'pic': True} - -builddependencies = [('binutils', '2.34')] - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic" - -moduleclass = 'numlib' diff --git a/Golden_Repo/g/GSL/GSL-2.6-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/g/GSL/GSL-2.6-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index d31e9d20ce305501906ca40509519d9866e74e36..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GSL/GSL-2.6-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,22 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GSL' -version = '2.6' - -homepage = 'http://www.gnu.org/software/gsl/' -description = """The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. - The library provides a wide range of mathematical routines such as random number generators, special functions - and least-squares fitting. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'opt': True, 'optarch': True, 'unroll': True, 'pic': True} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic" - -moduleclass = 'numlib' diff --git a/Golden_Repo/g/GTK+/GTK+-3.24.17-GCCcore-10.3.0.eb b/Golden_Repo/g/GTK+/GTK+-3.24.17-GCCcore-10.3.0.eb deleted file mode 100644 index 9bd1ee45008499db480481c2955bd3c34aaff169..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GTK+/GTK+-3.24.17-GCCcore-10.3.0.eb +++ /dev/null @@ -1,77 +0,0 @@ -easyblock = 'Bundle' - -name = 'GTK+' -version = '3.24.17' - -homepage = 'https://developer.gnome.org/gtk+/stable/' -description = """ - The GTK+ 3 package contains libraries used for creating graphical user interfaces for applications. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -builddependencies = [ - ('binutils', '2.36.1'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), - ('cairo', '1.17.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('ATK', '2.36.0'), - ('AT-SPI2-ATK', '2.34.2'), - ('Gdk-Pixbuf', '2.40.0'), - ('Pango', '1.44.7'), - ('X11', '20200222'), - ('libepoxy', '1.5.4'), - ('zlib', '1.2.11'), - ('FriBidi', '1.0.9'), - ('X11', '20200222'), # GDK backend - ('librsvg', '2.48.8'), # This installs the SVG loader in Gdk-Pixbuf, which is needed for adwaita -] - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCELOWER_TAR_XZ], - 'start_dir': '%(namelower)s-%(version)s', -} - -components = [ - (name, version, { - 'source_urls': [FTPGNOME_SOURCE], - 'checksums': ['f210255b221cb0f0db3e7b21399983b715c9dda6eb1e5c2f7fdf38f4f1b6bac0'], - 'configopts': "--disable-silent-rules --disable-glibtest --enable-introspection=yes --disable-visibility" + - "--disable-wayland-backend --enable-x11-backend", - }), - ('hicolor-icon-theme', '0.17', { - 'source_urls': ['https://icon-theme.freedesktop.org/releases/'], - 'checksums': ['317484352271d18cbbcfac3868eab798d67fff1b8402e740baa6ff41d588a9d8'], - }), - ('adwaita-icon-theme', '3.36.0', { - 'source_urls': [FTPGNOME_SOURCE], - 'checksums': ['1a172112b6da482d3be3de6a0c1c1762886e61e12b4315ae1aae9b69da1ed518'], - }), -] - -postinstallcmds = ['gtk-update-icon-cache'] - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['gtk3-demo', 'gtk3-demo-application', 'gtk3-icon-browser', 'gtk3-widget-factory', - 'gtk-builder-tool', 'gtk-launch', 'gtk-query-immodules-3.0', 'gtk-query-settings', - 'gtk-update-icon-cache']] + - ['lib/%s-%%(version_major)s.%s' % (x, SHLIB_EXT) for x in ['libgailutil', 'libgdk', 'libgtk']], - 'dirs': ['include/%s-%%(version_major)s.0' % x for x in ['gail', 'gtk']] + - ['share/icons/hicolor', 'share/icons/Adwaita'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GTK+/GTK+-3.24.17-GCCcore-9.3.0.eb b/Golden_Repo/g/GTK+/GTK+-3.24.17-GCCcore-9.3.0.eb deleted file mode 100644 index 7d954908e9717fd40220447dce90be6e3c1a3177..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GTK+/GTK+-3.24.17-GCCcore-9.3.0.eb +++ /dev/null @@ -1,77 +0,0 @@ -easyblock = 'Bundle' - -name = 'GTK+' -version = '3.24.17' - -homepage = 'https://developer.gnome.org/gtk+/stable/' -description = """ - The GTK+ 3 package contains libraries used for creating graphical user interfaces for 
applications. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -builddependencies = [ - ('binutils', '2.34'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), - ('cairo', '1.17.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('ATK', '2.36.0'), - ('AT-SPI2-ATK', '2.34.2'), - ('Gdk-Pixbuf', '2.40.0'), - ('Pango', '1.44.7'), - ('X11', '20200222'), - ('libepoxy', '1.5.4'), - ('zlib', '1.2.11'), - ('FriBidi', '1.0.9'), - ('X11', '20200222'), # GDK backend - ('librsvg', '2.48.8'), # This installs the SVG loader in Gdk-Pixbuf, which is needed for adwaita -] - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCELOWER_TAR_XZ], - 'start_dir': '%(namelower)s-%(version)s', -} - -components = [ - (name, version, { - 'source_urls': [FTPGNOME_SOURCE], - 'checksums': ['f210255b221cb0f0db3e7b21399983b715c9dda6eb1e5c2f7fdf38f4f1b6bac0'], - 'configopts': "--disable-silent-rules --disable-glibtest --enable-introspection=yes --disable-visibility" + - "--disable-wayland-backend --enable-x11-backend", - }), - ('hicolor-icon-theme', '0.17', { - 'source_urls': ['https://icon-theme.freedesktop.org/releases/'], - 'checksums': ['317484352271d18cbbcfac3868eab798d67fff1b8402e740baa6ff41d588a9d8'], - }), - ('adwaita-icon-theme', '3.36.0', { - 'source_urls': [FTPGNOME_SOURCE], - 'checksums': ['1a172112b6da482d3be3de6a0c1c1762886e61e12b4315ae1aae9b69da1ed518'], - }), -] - -postinstallcmds = ['gtk-update-icon-cache'] - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['gtk3-demo', 'gtk3-demo-application', 'gtk3-icon-browser', 'gtk3-widget-factory', - 'gtk-builder-tool', 'gtk-launch', 'gtk-query-immodules-3.0', 'gtk-query-settings', - 'gtk-update-icon-cache']] + - ['lib/%s-%%(version_major)s.%s' % (x, SHLIB_EXT) for x in ['libgailutil', 'libgdk', 'libgtk']], - 'dirs': ['include/%s-%%(version_major)s.0' % x for x in ['gail', 'gtk']] + - ['share/icons/hicolor', 'share/icons/Adwaita'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GTS/GTS-0.7.6-GCCcore-10.3.0.eb b/Golden_Repo/g/GTS/GTS-0.7.6-GCCcore-10.3.0.eb deleted file mode 100644 index ff00c34c02c534eca687860124f92822006768a6..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GTS/GTS-0.7.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GTS' -version = '0.7.6' - -homepage = 'http://gts.sourceforge.net/' -description = """GTS stands for the GNU Triangulated Surface Library. -It is an Open Source Free Software Library intended to provide a set of useful -functions to deal with 3D surfaces meshed with interconnected triangles. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('GLib', '2.64.4'), -] - -sanity_check_paths = { - 'files': ['lib/libgts.%s' % SHLIB_EXT, 'bin/gts2oogl', 'bin/gtscheck'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GTS/GTS-0.7.6-GCCcore-9.3.0.eb b/Golden_Repo/g/GTS/GTS-0.7.6-GCCcore-9.3.0.eb deleted file mode 100644 index 33086f870846a6614b6d1514122ce4aff0b41c62..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GTS/GTS-0.7.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GTS' -version = '0.7.6' - -homepage = 'http://gts.sourceforge.net/' -description = """GTS stands for the GNU Triangulated Surface Library. -It is an Open Source Free Software Library intended to provide a set of useful -functions to deal with 3D surfaces meshed with interconnected triangles. -""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('GLib', '2.64.4'), -] - -sanity_check_paths = { - 'files': ['lib/libgts.%s' % SHLIB_EXT, 'bin/gts2oogl', 'bin/gtscheck'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/Gdk-Pixbuf/Gdk-Pixbuf-2.40.0-GCCcore-10.3.0.eb b/Golden_Repo/g/Gdk-Pixbuf/Gdk-Pixbuf-2.40.0-GCCcore-10.3.0.eb deleted file mode 100644 index 4fd4cd09729fe537c175bf36397ddf5799f44c75..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Gdk-Pixbuf/Gdk-Pixbuf-2.40.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'Gdk-Pixbuf' -version = '2.40.0' - -homepage = 'https://developer.gnome.org/gdk-pixbuf/stable/' -description = """ - The Gdk Pixbuf is a toolkit for image loading and pixel buffer manipulation. - It is used by GTK+ 2 and GTK+ 3 to load and manipulate images. In the past it - was distributed as part of GTK+ 2 but it was split off into a separate package - in preparation for the change to GTK+ 3. 
- """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - - -builddependencies = [ - ('binutils', '2.36.1'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('GLib', '2.64.4'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), -] - -configopts = '-Ddefault_library=both -Dgio_sniffing=false -Dman=false' - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/Gdk-Pixbuf/Gdk-Pixbuf-2.40.0-GCCcore-9.3.0.eb b/Golden_Repo/g/Gdk-Pixbuf/Gdk-Pixbuf-2.40.0-GCCcore-9.3.0.eb deleted file mode 100644 index 3492c7813d9dcf2abb03ca0f4ec09983f48c78e8..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Gdk-Pixbuf/Gdk-Pixbuf-2.40.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'Gdk-Pixbuf' -version = '2.40.0' - -homepage = 'https://developer.gnome.org/gdk-pixbuf/stable/' -description = """ - The Gdk Pixbuf is a toolkit for image loading and pixel buffer manipulation. - It is used by GTK+ 2 and GTK+ 3 to load and manipulate images. In the past it - was distributed as part of GTK+ 2 but it was split off into a separate package - in preparation for the change to GTK+ 3. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - - -builddependencies = [ - ('binutils', '2.34'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('GLib', '2.64.4'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), -] - -configopts = '-Ddefault_library=both -Dgio_sniffing=false -Dman=false' - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/Ghostscript/Ghostscript-9.52-GCCcore-10.3.0.eb b/Golden_Repo/g/Ghostscript/Ghostscript-9.52-GCCcore-10.3.0.eb deleted file mode 100644 index c1436a3f803b0e0592a1c3ed94bbbcc7f55c53f4..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Ghostscript/Ghostscript-9.52-GCCcore-10.3.0.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Ghostscript' -version = '9.52' - -homepage = 'https://ghostscript.com' -description = """Ghostscript is a versatile processor for PostScript data with the ability to render PostScript to - different targets. 
It used to be part of the cups printing stack, but is no longer used for that.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs%(version_major)s%(version_minor)s/', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['c2501d8e8e0814c4a5aa7e443e230e73d7af7f70287546f7b697e5ef49e32176'] - -dependencies = [ - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('expat', '2.2.9'), - ('GLib', '2.64.4'), - ('cairo', '1.17.2'), - ('LibTIFF', '4.1.0'), -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.36.1'), -] - -# Do not use local copies of zlib, jpeg, freetype, and png -preconfigopts = "mv zlib zlib.no && mv jpeg jpeg.no && mv freetype freetype.no && mv libpng libpng.no && " -preconfigopts += 'export LIBS="$LIBS -lz" && ' - -configopts = "--with-system-libtiff --enable-dynamic" - -postinstallcmds = [ - # build and install shared libs - "make so && make soinstall", - # install header files - "mkdir -p %(installdir)s/include/ghostscript", - "install -v -m644 base/*.h %(installdir)s/include/ghostscript", - "install -v -m644 psi/*.h %(installdir)s/include/ghostscript", -] - -sanity_check_paths = { - 'files': ['bin/gs', 'lib/libgs.%s' % SHLIB_EXT], - 'dirs': ['lib/ghostscript', 'include/ghostscript', 'share/man'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/g/Ghostscript/Ghostscript-9.52-GCCcore-9.3.0.eb b/Golden_Repo/g/Ghostscript/Ghostscript-9.52-GCCcore-9.3.0.eb deleted file mode 100644 index 5d8181f88290a666c22bcde2af4352b9d0e762b7..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Ghostscript/Ghostscript-9.52-GCCcore-9.3.0.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Ghostscript' -version = '9.52' - -homepage = 'https://ghostscript.com' -description = """Ghostscript is a versatile processor for PostScript data with the ability to render PostScript to - different targets. 
It used to be part of the cups printing stack, but is no longer used for that.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs%(version_major)s%(version_minor)s/', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['c2501d8e8e0814c4a5aa7e443e230e73d7af7f70287546f7b697e5ef49e32176'] - -dependencies = [ - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('expat', '2.2.9'), - ('GLib', '2.64.4'), - ('cairo', '1.17.2'), - ('LibTIFF', '4.1.0'), -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.34'), -] - -# Do not use local copies of zlib, jpeg, freetype, and png -preconfigopts = "mv zlib zlib.no && mv jpeg jpeg.no && mv freetype freetype.no && mv libpng libpng.no && " -preconfigopts += 'export LIBS="$LIBS -lz" && ' - -configopts = "--with-system-libtiff --enable-dynamic" - -postinstallcmds = [ - # build and install shared libs - "make so && make soinstall", - # install header files - "mkdir -p %(installdir)s/include/ghostscript", - "install -v -m644 base/*.h %(installdir)s/include/ghostscript", - "install -v -m644 psi/*.h %(installdir)s/include/ghostscript", -] - -sanity_check_paths = { - 'files': ['bin/gs', 'lib/libgs.%s' % SHLIB_EXT], - 'dirs': ['lib/ghostscript', 'include/ghostscript', 'share/man'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/g/Go/Go-1.15.3.eb b/Golden_Repo/g/Go/Go-1.15.3.eb deleted file mode 100644 index be877cde4e6b8a3a651ef5f683422ca9c95ee990..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Go/Go-1.15.3.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'Tarball' - -name = 'Go' -version = '1.15.3' - -homepage = 'http://www.golang.org' -description = """Go is an open source programming language that makes it easy to build - simple, reliable, and efficient software.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://storage.googleapis.com/golang/'] -sources = ['%(namelower)s%(version)s.linux-amd64.tar.gz'] - -sanity_check_paths = { - 'files': ['bin/go', 'bin/gofmt'], - 'dirs': ['api', 'doc', 'lib', 'pkg'], -} - -modextravars = {'GOROOT': '%(installdir)s'} - -moduleclass = 'compiler' diff --git a/Golden_Repo/g/Grace/Grace-5.1.25-GCCcore-10.3.0.eb b/Golden_Repo/g/Grace/Grace-5.1.25-GCCcore-10.3.0.eb deleted file mode 100644 index 903acad751384f705af4a56e5b80d7f18e5b28fb..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Grace/Grace-5.1.25-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Grace' -version = '5.1.25' - -homepage = 'http://freecode.com/projects/grace' -description = 'Grace is a WYSIWYG 2D plotting tool for X Windows System and Motif.' 
- -site_contacts = 'sc@fz-juelich.de' - -source_urls = ['ftp://plasma-gate.weizmann.ac.il/pub/grace/src/stable'] -sources = [SOURCELOWER_TAR_GZ] - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('motif', '2.3.8'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('X11', '20200222'), -] - -configopts = "--enable-grace-home='$(PREFIX)'" - -runtest = 'tests' - -sanity_check_paths = { - 'files': ['bin/xmgrace'], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/Grace/Grace-5.1.25-GCCcore-9.3.0.eb b/Golden_Repo/g/Grace/Grace-5.1.25-GCCcore-9.3.0.eb deleted file mode 100644 index 2726c20659b3fbf1fa80f02b12ca470f74d96b77..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Grace/Grace-5.1.25-GCCcore-9.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Grace' -version = '5.1.25' - -homepage = 'http://freecode.com/projects/grace' -description = 'Grace is a WYSIWYG 2D plotting tool for X Windows System and Motif.' - -site_contacts = 'sc@fz-juelich.de' - -source_urls = ['ftp://plasma-gate.weizmann.ac.il/pub/grace/src/stable'] -sources = [SOURCELOWER_TAR_GZ] - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('motif', '2.3.8'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('X11', '20200222'), -] - -configopts = "--enable-grace-home='$(PREFIX)'" - -runtest = 'tests' - -sanity_check_paths = { - 'files': ['bin/xmgrace'], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GraphicsMagick/GraphicsMagick-1.3.35-GCCcore-10.3.0.eb b/Golden_Repo/g/GraphicsMagick/GraphicsMagick-1.3.35-GCCcore-10.3.0.eb deleted file mode 100644 index 19c3121f5a2d8cdab703609106b42dde9d964f2a..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GraphicsMagick/GraphicsMagick-1.3.35-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GraphicsMagick' -version = '1.3.35' - -homepage = 'http://www.graphicsmagick.org/' -description = """GraphicsMagick is the swiss army knife of image processing. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - SOURCEFORGE_SOURCE, - 'ftp://ftp.graphicsmagick.org/pub/GraphicsMagick/%(version_major_minor)s/', -] -sources = [SOURCE_TAR_GZ] - -configopts = "--enable-shared --enable-static " - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - ('libtool', '2.4.6'), -] - -dependencies = [ - ('X11', '20200222'), - ('bzip2', '1.0.8'), - ('freetype', '2.10.1'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('LibTIFF', '4.1.0'), - ('libxml2', '2.9.10'), - ('XZ', '5.2.5'), - ('zlib', '1.2.11'), -] - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GraphicsMagick/GraphicsMagick-1.3.35-GCCcore-9.3.0.eb b/Golden_Repo/g/GraphicsMagick/GraphicsMagick-1.3.35-GCCcore-9.3.0.eb deleted file mode 100644 index 81aac97f88dc0950a239efc151733feb08c88743..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GraphicsMagick/GraphicsMagick-1.3.35-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'GraphicsMagick' -version = '1.3.35' - -homepage = 'http://www.graphicsmagick.org/' -description = """GraphicsMagick is the swiss army knife of image processing. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - SOURCEFORGE_SOURCE, - 'ftp://ftp.graphicsmagick.org/pub/GraphicsMagick/%(version_major_minor)s/', -] -sources = [SOURCE_TAR_GZ] - -configopts = "--enable-shared --enable-static " - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - ('libtool', '2.4.6'), -] - -dependencies = [ - ('X11', '20200222'), - ('bzip2', '1.0.8'), - ('freetype', '2.10.1'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('LibTIFF', '4.1.0'), - ('libxml2', '2.9.10'), - ('XZ', '5.2.5'), - ('zlib', '1.2.11'), -] - -moduleclass = 'vis' diff --git a/Golden_Repo/g/Graphviz/Graphviz-2.44.1-GCCcore-10.3.0.eb b/Golden_Repo/g/Graphviz/Graphviz-2.44.1-GCCcore-10.3.0.eb deleted file mode 100644 index 860989244884a9af869d09d4d092f9ba0e81d4a5..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Graphviz/Graphviz-2.44.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,79 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Graphviz' -version = '2.44.1' - -homepage = 'http://www.graphviz.org/' -description = """Graphviz is open source graph visualization software. Graph visualization -is a way of representing structural information as diagrams of -abstract graphs and networks. It has important applications in networking, -bioinformatics, software engineering, database and web design, machine learning, -and in visual interfaces for other technical domains. -""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://www2.graphviz.org/Packages/stable/portable_source/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = ['tclpkg-Makefile.am.patch'] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.36.1'), - ('Bison', '3.7.6'), - ('flex', '2.6.4'), - ('pkg-config', '0.29.2'), - ('SWIG', '4.0.2', '-Python-3.8.5'), -] - -dependencies = [ - ('cairo', '1.17.2'), - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('X11', '20200222'), - ('Ghostscript', '9.52'), - ('GLib', '2.64.4'), - ('GTS', '0.7.6'), - ('Java', '15', '', SYSTEM), - ('libpng', '1.6.37'), - ('librsvg', '2.48.8'), - ('Pango', '1.44.7'), - ('Qt5', '5.14.2'), - ('Tcl', '8.6.10'), - ('zlib', '1.2.11'), -] - -preconfigopts = './autogen.sh NOCONFIG && ' - -configopts = '--enable-guile=no --enable-lua=no --enable-ocaml=no ' -configopts += '--enable-r=no --enable-ruby=no --enable-perl=no ' -configopts += '--enable-python=no --enable-python2=no --enable-python3=no ' -# Use ltdl from libtool in EB -configopts += '--enable-ltdl --without-included-ltdl --disable-ltdl-install ' -configopts += '--with-ltdl-include=$EBROOTLIBTOOL/include --with-ltdl-lib=$EBROOTLIBTOOL/lib ' -configopts += '--with-expatincludedir=$EBROOTEXPAT/include --with-expatlibdir=$EBROOTEXPAT/lib ' -configopts += '--with-tclsh=$EBROOTTCL/bin/tclsh8.6' - -prebuildopts = 'qmake -o cmd/gvedit/qMakefile cmd/gvedit/gvedit.pro && ' - -sanity_check_paths = { - 'files': ['bin/cluster', 'bin/dot', 'bin/gvmap', - 'lib/libcdt.%s' % SHLIB_EXT, 'lib/libgvc.%s' % SHLIB_EXT, 'lib/libxdot.%s' % SHLIB_EXT], - 'dirs': ['include', 'lib/graphviz'] -} - -sanity_check_commands = [ - ("test ! -d $EBROOTTCL/lib/*/graphviz", ''), - ("test ! 
-d $EBROOTTCL/lib64/*/graphviz", ''), -] - -modextrapaths = { - 'CLASSPATH': 'lib/graphviz/java/org/graphviz', - 'LD_LIBRARY_PATH': 'lib/graphviz/java', - 'TCLLIBPATH': 'lib/graphviz/tcl', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/Graphviz/Graphviz-2.44.1-GCCcore-9.3.0.eb b/Golden_Repo/g/Graphviz/Graphviz-2.44.1-GCCcore-9.3.0.eb deleted file mode 100644 index 973ed942e553d3b8bcc8366b05b7a1e938921277..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Graphviz/Graphviz-2.44.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,79 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Graphviz' -version = '2.44.1' - -homepage = 'http://www.graphviz.org/' -description = """Graphviz is open source graph visualization software. Graph visualization -is a way of representing structural information as diagrams of -abstract graphs and networks. It has important applications in networking, -bioinformatics, software engineering, database and web design, machine learning, -and in visual interfaces for other technical domains. -""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://www2.graphviz.org/Packages/stable/portable_source/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = ['tclpkg-Makefile.am.patch'] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.34'), - ('Bison', '3.6.4'), - ('flex', '2.6.4'), - ('pkg-config', '0.29.2'), - ('SWIG', '3.0.12', '-Python-3.8.5'), -] - -dependencies = [ - ('cairo', '1.17.2'), - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('X11', '20200222'), - ('Ghostscript', '9.52'), - ('GLib', '2.64.4'), - ('GTS', '0.7.6'), - ('Java', '1.8', '', SYSTEM), - ('libpng', '1.6.37'), - ('librsvg', '2.48.8'), - ('Pango', '1.44.7'), - ('Qt5', '5.14.2'), - ('Tcl', '8.6.10'), - ('zlib', '1.2.11'), -] - -preconfigopts = './autogen.sh NOCONFIG && ' - -configopts = '--enable-guile=no --enable-lua=no --enable-ocaml=no ' -configopts += '--enable-r=no --enable-ruby=no --enable-perl=no ' -configopts += '--enable-python=no --enable-python2=no --enable-python3=no ' -# Use ltdl from libtool in EB -configopts += '--enable-ltdl --without-included-ltdl --disable-ltdl-install ' -configopts += '--with-ltdl-include=$EBROOTLIBTOOL/include --with-ltdl-lib=$EBROOTLIBTOOL/lib ' -configopts += '--with-expatincludedir=$EBROOTEXPAT/include --with-expatlibdir=$EBROOTEXPAT/lib ' -configopts += '--with-tclsh=$EBROOTTCL/bin/tclsh8.6' - -prebuildopts = 'qmake -o cmd/gvedit/qMakefile cmd/gvedit/gvedit.pro && ' - -sanity_check_paths = { - 'files': ['bin/cluster', 'bin/dot', 'bin/gvmap', - 'lib/libcdt.%s' % SHLIB_EXT, 'lib/libgvc.%s' % SHLIB_EXT, 'lib/libxdot.%s' % SHLIB_EXT], - 'dirs': ['include', 'lib/graphviz'] -} - -sanity_check_commands = [ - ("test ! -d $EBROOTTCL/lib/*/graphviz", ''), - ("test ! 
-d $EBROOTTCL/lib64/*/graphviz", ''), -] - -modextrapaths = { - 'CLASSPATH': 'lib/graphviz/java/org/graphviz', - 'LD_LIBRARY_PATH': 'lib/graphviz/java', - 'TCLLIBPATH': 'lib/graphviz/tcl', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/Graphviz/tclpkg-Makefile.am.patch b/Golden_Repo/g/Graphviz/tclpkg-Makefile.am.patch deleted file mode 100644 index 2d7d815c5f366a4140a44e2cb9d5c250e02733b3..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/Graphviz/tclpkg-Makefile.am.patch +++ /dev/null @@ -1,102 +0,0 @@ ---- tclpkg/Makefile.am.orig 2020-08-30 21:34:05.800467382 +0200 -+++ tclpkg/Makefile.am 2020-08-30 21:34:32.306654331 +0200 -@@ -34,98 +34,7 @@ - # ./configure --prefix=$HOME/graphviz; make; make install - # without root privileges. - install-data-hook: --if WITH_LUA -- -mkdir -p $(DESTDIR)@LUA_INSTALL_DIR@; -- if test -w $(DESTDIR)@LUA_INSTALL_DIR@; then \ -- (cd $(DESTDIR)@LUA_INSTALL_DIR@; \ -- cp -f $(DESTDIR)$(pkgluadir)/libgv_lua.so gv.so;) \ -- else \ -- echo "Warning: @LUA_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of lua binding."; \ -- fi --endif --if WITH_PERL -- -mkdir -p $(DESTDIR)@PERL_INSTALL_DIR@; -- if test -w $(DESTDIR)@PERL_INSTALL_DIR@; then \ -- (cd $(DESTDIR)@PERL_INSTALL_DIR@; \ -- cp -f $(DESTDIR)$(pkgperldir)/libgv_perl.so gv.so; \ -- cp -f $(DESTDIR)$(pkgperldir)/gv.pm gv.pm;) \ -- else \ -- echo "Warning: @PERL_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of perl binding."; \ -- fi --endif --if WITH_PHP -- -mkdir -p $(DESTDIR)@PHP_INSTALL_DIR@; -- if test -w $(DESTDIR)@PHP_INSTALL_DIR@; then \ -- (cd $(DESTDIR)@PHP_INSTALL_DIR@; \ -- cp -f $(DESTDIR)$(pkgphpdir)/libgv_php.so gv.so;) \ -- else \ -- echo "Warning: @PHP_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of php binding."; \ -- fi -- -mkdir -p $(DESTDIR)@PHP_INSTALL_DATADIR@; -- if test -w $(DESTDIR)@PHP_INSTALL_DATADIR@; then \ -- (cd $(DESTDIR)@PHP_INSTALL_DATADIR@; \ -- cp -f $(DESTDIR)$(pkgphpdir)/gv.php gv.php;) \ -- else \ -- echo "Warning: @PHP_INSTALL_DATADIR@ is not writable."; \ -- echo "Skipping system installation of php binding."; \ -- fi --endif --if WITH_PYTHON -- -mkdir -p $(DESTDIR)@PYTHON_INSTALL_DIR@; -- if test -w $(DESTDIR)@PYTHON_INSTALL_DIR@; then \ -- (cd $(DESTDIR)@PYTHON_INSTALL_DIR@; \ -- cp -f $(DESTDIR)$(pkgpythondir)/libgv_python.so _gv.so; \ -- cp -f $(DESTDIR)$(pkgpythondir)/gv.py gv.py;) \ -- else \ -- echo "Warning: @PYTHON_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of python binding."; \ -- fi --endif --if WITH_PYTHON2 -- -mkdir -p $(DESTDIR)@PYTHON2_INSTALL_DIR@; -- if test -w $(DESTDIR)@PYTHON2_INSTALL_DIR@; then \ -- (cd $(DESTDIR)@PYTHON2_INSTALL_DIR@; \ -- cp -f $(DESTDIR)$(pkgpython2dir)/libgv_python2.so _gv.so; \ -- cp -f $(DESTDIR)$(pkgpython2dir)/gv.py gv.py;) \ -- else \ -- echo "Warning: @PYTHON3_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of python2 binding."; \ -- fi --endif --if WITH_PYTHON3 -- -mkdir -p $(DESTDIR)@PYTHON3_INSTALL_DIR@; -- if test -w $(DESTDIR)@PYTHON3_INSTALL_DIR@; then \ -- (cd $(DESTDIR)@PYTHON3_INSTALL_DIR@; \ -- cp -f $(DESTDIR)$(pkgpython3dir)/libgv_python3.so _gv.so; \ -- cp -f $(DESTDIR)$(pkgpython3dir)/gv.py gv.py;) \ -- else \ -- echo "Warning: @PYTHON3_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of python3 binding."; \ -- fi --endif --if WITH_RUBY -- -mkdir -p $(DESTDIR)@RUBY_INSTALL_DIR@; -- if test -w $(DESTDIR)@RUBY_INSTALL_DIR@; then \ -- (cd 
$(DESTDIR)@RUBY_INSTALL_DIR@; \ -- cp -f $(DESTDIR)$(pkgrubydir)/libgv_ruby.so gv.so;) \ -- else \ -- echo "Warning: @RUBY_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of ruby binding."; \ -- fi --endif --if WITH_TCL -- -mkdir -p $(DESTDIR)@TCL_INSTALL_DIR@; -- if test -w $(DESTDIR)@TCL_INSTALL_DIR@/; then \ -- (cd $(DESTDIR)@TCL_INSTALL_DIR@; \ -- cp -rf $(DESTDIR)$(pkgtcldir) @PACKAGE_NAME@;) \ -- else \ -- echo "Warning: @TCL_INSTALL_DIR@ is not writable."; \ -- echo "Skipping system installation of tcl bindings."; \ -- fi --endif -+ echo "installation patched out" - - # removal of installs into @xxx_INSTALL_DIR@ fail if root - # has installed a system copy diff --git a/Golden_Repo/g/GtkSourceView/GtkSourceView-4.4.0-GCCcore-10.3.0.eb b/Golden_Repo/g/GtkSourceView/GtkSourceView-4.4.0-GCCcore-10.3.0.eb deleted file mode 100644 index 62245e901a0c9899b1d3d1a7a135919c71d42540..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GtkSourceView/GtkSourceView-4.4.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'GtkSourceView' -version = '4.4.0' - -homepage = 'https://wiki.gnome.org/Projects/GtkSourceView' -description = """ - GtkSourceView is a portable C library that extends the standard GTK+ framework for multiline text editing with support - for configurable syntax highlighting, unlimited undo/redo, search and replace, a completion framework, printing and - other features typical of a source code editor. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), -] - -dependencies = [ - ('GLib', '2.64.4'), - ('GTK+', '3.24.17'), - ('libxml2', '2.9.10'), - ('FriBidi', '1.0.9'), -] - -configopts = "--buildtype=release " -configopts += "-Dgir=true -Dvapi=false " - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib/lib%%(namelower)s-%%(version_major)s.%s' % SHLIB_EXT], - 'dirs': ['include/%(namelower)s-%(version_major)s', 'share'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/GtkSourceView/GtkSourceView-4.4.0-GCCcore-9.3.0.eb b/Golden_Repo/g/GtkSourceView/GtkSourceView-4.4.0-GCCcore-9.3.0.eb deleted file mode 100644 index 3d729760d9fa3ff236311f5fa6918250073bebaf..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/GtkSourceView/GtkSourceView-4.4.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'GtkSourceView' -version = '4.4.0' - -homepage = 'https://wiki.gnome.org/Projects/GtkSourceView' -description = """ - GtkSourceView is a portable C library that extends the standard GTK+ framework for multiline text editing with support - for configurable syntax highlighting, unlimited undo/redo, search and replace, a completion framework, printing and - other features typical of a source code editor. 
- """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), -] - -dependencies = [ - ('GLib', '2.64.4'), - ('GTK+', '3.24.17'), - ('libxml2', '2.9.10'), - ('FriBidi', '1.0.9'), -] - -configopts = "--buildtype=release " -configopts += "-Dgir=true -Dvapi=false " - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib/lib%%(namelower)s-%%(version_major)s.%s' % SHLIB_EXT], - 'dirs': ['include/%(namelower)s-%(version_major)s', 'share'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/g2clib/g2clib-1.6.0-GCCcore-10.3.0.eb b/Golden_Repo/g/g2clib/g2clib-1.6.0-GCCcore-10.3.0.eb deleted file mode 100644 index 1b6521111e499bdff14b601a70409cd2a0adb0dc..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2clib/g2clib-1.6.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'g2clib' -version = '1.6.0' - -homepage = 'http://www.nco.ncep.noaa.gov/pmb/codes/GRIB2/' -description = "Library contains GRIB2 encoder/decoder ('C' version)." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'opt': True} - -sources = [SOURCE_TAR] -source_urls = [homepage] - -patches = [ - 'g2clib-1.6.0_no_inmem_jasper.patch' -] - -builddependencies = [('binutils', '2.36.1')] - -dependencies = [ - ('JasPer', '2.0.19'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'data' diff --git a/Golden_Repo/g/g2clib/g2clib-1.6.0-GCCcore-9.3.0.eb b/Golden_Repo/g/g2clib/g2clib-1.6.0-GCCcore-9.3.0.eb deleted file mode 100644 index 2955f6b81655c1d09567211312ee87b3bc710ed5..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2clib/g2clib-1.6.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'g2clib' -version = '1.6.0' - -homepage = 'http://www.nco.ncep.noaa.gov/pmb/codes/GRIB2/' -description = "Library contains GRIB2 encoder/decoder ('C' version)." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'opt': True} - -sources = [SOURCE_TAR] -source_urls = [homepage] - -patches = [ - 'g2clib-1.6.0_no_inmem_jasper.patch' -] - -builddependencies = [('binutils', '2.34')] - -dependencies = [ - ('JasPer', '2.0.19'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'data' diff --git a/Golden_Repo/g/g2clib/g2clib-1.6.0_no_inmem_jasper.patch b/Golden_Repo/g/g2clib/g2clib-1.6.0_no_inmem_jasper.patch deleted file mode 100644 index 8e287cc97b792f8515096baba926ba89e8e46eaf..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2clib/g2clib-1.6.0_no_inmem_jasper.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ruN g2clib-1.6.0.old/enc_jpeg2000.c g2clib-1.6.0/enc_jpeg2000.c ---- g2clib-1.6.0.old/enc_jpeg2000.c 2017-03-22 18:12:47.219593061 +0100 -+++ g2clib-1.6.0/enc_jpeg2000.c 2017-03-22 18:13:28.359251245 +0100 -@@ -121,7 +121,7 @@ - image.clrspc_=JAS_CLRSPC_SGRAY; /* grayscale Image */ - image.cmprof_=0; - #endif -- image.inmem_=1; -+// image.inmem_=1; - - cmpt.tlx_=0; - cmpt.tly_=0; diff --git a/Golden_Repo/g/g2lib/fix_makefile.patch b/Golden_Repo/g/g2lib/fix_makefile.patch deleted file mode 100644 index eb76f8e415175c8bb76b908dc64ecd1224dbf406..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2lib/fix_makefile.patch +++ /dev/null @@ -1,30 +0,0 @@ ---- g2lib-3.1.0.orig/makefile 2017-10-11 18:42:27.259459096 +0200 -+++ g2lib-3.1.0/makefile 2017-10-11 18:56:01.474558217 +0200 -@@ -111,7 +111,7 @@ - CPP=cpp -P -C - MODDIR=. - CFLAGS=-O3 $(DEFS) $(INCDIR) --FFLAGS=-O3 -g -I $(MODDIR) -+FFLAGS=-O3 -g -I$(MODDIR) - - # - #-------------------------------------- -diff -ruN g2lib-3.1.0.orig/makefile g2lib-3.1.0/makefile ---- g2lib-3.1.0.orig/makefile 2017-10-11 18:42:27.259459096 +0200 -+++ g2lib-3.1.0/makefile 2017-10-11 18:56:01.474558217 +0200 -@@ -161,6 +161,7 @@ - $(LIB)(gribinfo.o) \ - $(LIB)(mkieee.o) \ - $(LIB)(rdieee.o) \ -+ $(LIB)(intmath.o) \ - $(LIB)(simpack.o) \ - $(LIB)(simunpack.o) \ - $(LIB)(cmplxpack.o) \ -@@ -196,7 +197,6 @@ - $(LIB)(params.o) \ - $(LIB)(params_ecmwf.o) \ - $(LIB)(getidx.o) \ -- $(LIB)(intmath.o) \ - $(LIB)(gdt2gds.o) - - .F.f: diff --git a/Golden_Repo/g/g2lib/g2lib-3.1.0-GCCcore-10.3.0.eb b/Golden_Repo/g/g2lib/g2lib-3.1.0-GCCcore-10.3.0.eb deleted file mode 100644 index 95f4b6b45a11bb84a2afd96b38e5c8203e48a688..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2lib/g2lib-3.1.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'g2lib' -version = '3.1.0' - -homepage = 'http://www.nco.ncep.noaa.gov/pmb/codes/GRIB2/' -description = """Library contains GRIB2 encoder/decoder and search/indexing routines. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'opt': True} - -sources = [SOURCE_TAR] -source_urls = [homepage] - -patches = [ - 'fix_makefile.patch', - 'g2lib-1.4.0-with-JasPer-2.x.patch', - 'g2lib_bitmask.patch', - 'g2lib_adapt_iand_for_gcc9.patch', -] - -builddependencies = [('binutils', '2.36.1')] - -dependencies = [ - ('JasPer', '2.0.19'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -buildopts = 'FFLAGS="$FFLAGS -fallow-argument-mismatch"' - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'data' diff --git a/Golden_Repo/g/g2lib/g2lib-3.1.0-GCCcore-9.3.0.eb b/Golden_Repo/g/g2lib/g2lib-3.1.0-GCCcore-9.3.0.eb deleted file mode 100644 index 6551d4d880fd781b094e81f0d4cd2d8baadfe075..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2lib/g2lib-3.1.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'g2lib' -version = '3.1.0' - -homepage = 'http://www.nco.ncep.noaa.gov/pmb/codes/GRIB2/' -description = """Library contains GRIB2 encoder/decoder and search/indexing routines. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'opt': True} - -sources = [SOURCE_TAR] -source_urls = [homepage] - -patches = [ - 'fix_makefile.patch', - 'g2lib-1.4.0-with-JasPer-2.x.patch', - 'g2lib_bitmask.patch', - 'g2lib_adapt_iand_for_gcc9.patch', -] - -builddependencies = [('binutils', '2.34')] - -dependencies = [ - ('JasPer', '2.0.19'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'data' diff --git a/Golden_Repo/g/g2lib/g2lib_adapt_iand_for_gcc9.patch b/Golden_Repo/g/g2lib/g2lib_adapt_iand_for_gcc9.patch deleted file mode 100644 index b783ede66c185d0855368a96e05bca407226ade0..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2lib/g2lib_adapt_iand_for_gcc9.patch +++ /dev/null @@ -1,20 +0,0 @@ ---- intmath.f.orig 2020-08-24 13:37:37.198774401 +0200 -+++ intmath.f 2020-08-24 13:45:41.811727000 +0200 -@@ -169,7 +169,7 @@ - ilog2_2=0 - i=i_in - if(i<=0) return -- if(iand(i,i-1)/=0) then -+ if(iand(int(i, kind(i-1)),i-1)/=0) then - !write(0,*) 'iand i-1' - ilog2_2=1 - endif -@@ -204,7 +204,7 @@ - ilog2_1=0 - i=i_in - if(i<=0) return -- if(iand(i,i-1)/=0) then -+ if(iand(int(i, kind(i-1)),i-1)/=0) then - !write(0,*) 'iand i-1' - ilog2_1=1 - endif diff --git a/Golden_Repo/g/g2lib/g2lib_bitmask.patch b/Golden_Repo/g/g2lib/g2lib_bitmask.patch deleted file mode 100644 index 93c2bc4e309f598b8541e37f2b9482348b1aa88e..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/g2lib/g2lib_bitmask.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff -ruN g2lib-3.1.0.orig/addfield.f g2lib-3.1.0/addfield.f ---- g2lib-3.1.0.orig/addfield.f 2017-10-11 18:42:27.260459115 +0200 -+++ g2lib-3.1.0/addfield.f 2017-10-11 18:43:55.107089682 +0200 -@@ -119,7 +119,9 @@ - integer lensec3,lensec4,lensec5,lensec6,lensec7 - logical issec3,needext,isprevbmap - -- allones=Z'FFFFFFFF' -+ ! Z'FFFFFFFF' causes and overflow and does not really set the -+ ! whole bit mask to 1. -1 does. -+ allones=-1 - ierr=0 - ! - ! 
Check to see if beginning of GRIB message exists diff --git a/Golden_Repo/g/gc/gc-8.0.2-GCCcore-10.3.0.eb b/Golden_Repo/g/gc/gc-8.0.2-GCCcore-10.3.0.eb deleted file mode 100644 index a4877749fb00217ea6e7c2a3e17b98d96bf1208a..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gc/gc-8.0.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gc' -version = '8.0.2' - -homepage = 'http://hboehm.info/gc/' -description = """The Boehm-Demers-Weiser conservative garbage collector can be used as a garbage collecting replacement -for C malloc or C++ new. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://hboehm.info/gc/gc_source/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1') -] - -configopts = '--enable-static' - -sanity_check_paths = { - 'files': ['include/gc.h', 'lib/libcord.a', 'lib/libcord.%s' % SHLIB_EXT, 'lib/libgc.a', 'lib/libgc.%s' % SHLIB_EXT], - 'dirs': ['include/gc', 'share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/g/gc/gc-8.0.2-GCCcore-9.3.0.eb b/Golden_Repo/g/gc/gc-8.0.2-GCCcore-9.3.0.eb deleted file mode 100644 index e24ddede9f8c7a717bf8dcd3be00464ffd6ee186..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gc/gc-8.0.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gc' -version = '8.0.2' - -homepage = 'http://hboehm.info/gc/' -description = """The Boehm-Demers-Weiser conservative garbage collector can be used as a garbage collecting replacement -for C malloc or C++ new. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://hboehm.info/gc/gc_source/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34') -] - -configopts = '--enable-static' - -sanity_check_paths = { - 'files': ['include/gc.h', 'lib/libcord.a', 'lib/libcord.%s' % SHLIB_EXT, 'lib/libgc.a', 'lib/libgc.%s' % SHLIB_EXT], - 'dirs': ['include/gc', 'share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/g/gcccoremkl/gcccoremkl-10.3.0-2021.2.0.eb b/Golden_Repo/g/gcccoremkl/gcccoremkl-10.3.0-2021.2.0.eb deleted file mode 100644 index 108f2cd07325b9c68d99aa8d910df40c9cd4b9ac..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gcccoremkl/gcccoremkl-10.3.0-2021.2.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = "Toolchain" - -local_mklver = '2021.2.0' -local_comp_version = '10.3.0' -name = 'gcccoremkl' -version = '%s-%s' % (local_comp_version, local_mklver) - -homepage = '(none)' -description = """GCCcore compiler toolchain with MKL""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -local_comp_name = 'GCCcore' -local_comp = (local_comp_name, local_comp_version) - -# compiler toolchain dependencies -dependencies = [ - local_comp, - ('binutils', '2.36.1', '', local_comp), - ('imkl', local_mklver, '', SYSTEM), -] - -hiddendependencies = [('imkl', local_mklver, '', SYSTEM)] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gcccoremkl/gcccoremkl-9.3.0-2020.2.254.eb b/Golden_Repo/g/gcccoremkl/gcccoremkl-9.3.0-2020.2.254.eb deleted file mode 100644 index 8a73818e9690d6b87797922e0cd3fba7d7d3708c..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gcccoremkl/gcccoremkl-9.3.0-2020.2.254.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = "Toolchain" - -local_mklver = '2020.2.254' -local_comp_version = '9.3.0' -name = 'gcccoremkl' -version = '%s-%s' % (local_comp_version, 
local_mklver) - -homepage = '(none)' -description = """GCCcore compiler toolchain with MKL""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -local_comp_name = 'GCCcore' -local_comp = (local_comp_name, local_comp_version) - -# compiler toolchain dependencies -dependencies = [ - local_comp, - ('binutils', '2.34', '', local_comp), - ('imkl', local_mklver, '', SYSTEM), -] - -hiddendependencies = [('imkl', local_mklver, '', SYSTEM)] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gettext/gettext-0.20.2-GCCcore-10.3.0.eb b/Golden_Repo/g/gettext/gettext-0.20.2-GCCcore-10.3.0.eb deleted file mode 100644 index 8fba0e77b5fb002392b86b97f00fbdea0b03ea30..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gettext/gettext-0.20.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gettext' -version = '0.20.2' - -homepage = 'http://www.gnu.org/software/gettext/' -description = """GNU `gettext' is an important step for the GNU Translation Project, as it is an asset on which we may -build many other steps. This package offers to programmers, translators, and even users, a well integrated set of tools -and documentation -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [GNU_SOURCE] - -configopts = '--without-emacs --with-libxml2-prefix=$EBROOTLIBXML2' - -dependencies = [ - ('libxml2', '2.9.10'), - ('ncurses', '6.2'), -] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -moduleclass = 'vis' diff --git a/Golden_Repo/g/gettext/gettext-0.20.2-GCCcore-9.3.0.eb b/Golden_Repo/g/gettext/gettext-0.20.2-GCCcore-9.3.0.eb deleted file mode 100644 index 75f22322fb86f86b9738e0787ca1cb8dd26793f0..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gettext/gettext-0.20.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gettext' -version = '0.20.2' - -homepage = 'http://www.gnu.org/software/gettext/' -description = """GNU `gettext' is an important step for the GNU Translation Project, as it is an asset on which we may -build many other steps. This package offers to programmers, translators, and even users, a well integrated set of tools -and documentation -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [GNU_SOURCE] - -configopts = '--without-emacs --with-libxml2-prefix=$EBROOTLIBXML2' - -dependencies = [ - ('libxml2', '2.9.10'), - ('ncurses', '6.2'), -] - -builddependencies = [ - ('binutils', '2.34'), -] - -moduleclass = 'vis' diff --git a/Golden_Repo/g/gettext/gettext-0.20.2.eb b/Golden_Repo/g/gettext/gettext-0.20.2.eb deleted file mode 100644 index c90824899a47a9e555a7d4ae16a4aa8e8a4bf56a..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gettext/gettext-0.20.2.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gettext' -version = '0.20.2' - -homepage = 'https://www.gnu.org/software/gettext/' -description = """GNU 'gettext' is an important step for the GNU Translation Project, as it is an asset on which we may -build many other steps. 
This package offers to programmers, translators, and even users, a well integrated set of tools -and documentation""" - -site_contacts = 'sc@fz-juelich.de' - -# This is a basic stripped down version of gettext without any -# dependencies on other packages used as initial builddep for XZ -# It is the first step in the cyclic dependency chain of -# XZ -> libxml2 -> gettext -> XZ - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('ncurses', '6.2'), -] - -configopts = '--without-emacs --with-included-libxml --without-xz --without-bzip2' - -sanity_check_paths = { - 'files': ['bin/gettext', 'lib/libasprintf.a', 'lib/libasprintf.%s' % SHLIB_EXT, - 'lib/libgettextpo.a', 'lib/libgettextpo.%s' % SHLIB_EXT], - 'dirs': ['include'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/g/gflags/gflags-2.2.2-GCCcore-10.3.0.eb b/Golden_Repo/g/gflags/gflags-2.2.2-GCCcore-10.3.0.eb deleted file mode 100644 index d5e8f8f0da801e5ab6c53cbf398a6fb4e41c300d..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gflags/gflags-2.2.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'gflags' -version = '2.2.2' - -homepage = 'https://github.com/gflags/gflags' -description = """ -The gflags package contains a C++ library that implements commandline flags -processing. It includes built-in support for standard types such as string -and the ability to define flags in the source file in which they are used. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/gflags/gflags/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['34af2f15cf7367513b352bdcd2493ab14ce43692d2dcd9dfc499492966c64dcf'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -configopts = '-DBUILD_SHARED_LIBS=on -DBUILD_STATIC_LIBS=on' - -sanity_check_paths = { - 'files': ['bin/gflags_completions.sh'] + - ['lib/%s' % x for x in ['libgflags.%s' % SHLIB_EXT, - 'libgflags_nothreads.%s' % SHLIB_EXT, - 'libgflags.a', - 'libgflags_nothreads.a']] + - ['include/gflags/gflags_completions.h'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/gflags/gflags-2.2.2-GCCcore-9.3.0.eb b/Golden_Repo/g/gflags/gflags-2.2.2-GCCcore-9.3.0.eb deleted file mode 100644 index 7a157771fd591e30ff8a98d7ef7c71f26dd31a7d..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gflags/gflags-2.2.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'gflags' -version = '2.2.2' - -homepage = 'https://github.com/gflags/gflags' -description = """ -The gflags package contains a C++ library that implements commandline flags -processing. It includes built-in support for standard types such as string -and the ability to define flags in the source file in which they are used. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/gflags/gflags/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['34af2f15cf7367513b352bdcd2493ab14ce43692d2dcd9dfc499492966c64dcf'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -configopts = '-DBUILD_SHARED_LIBS=on -DBUILD_STATIC_LIBS=on' - -sanity_check_paths = { - 'files': ['bin/gflags_completions.sh'] + - ['lib/%s' % x for x in ['libgflags.%s' % SHLIB_EXT, - 'libgflags_nothreads.%s' % SHLIB_EXT, - 'libgflags.a', - 'libgflags_nothreads.a']] + - ['include/gflags/gflags_completions.h'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/giflib/giflib-5.2.1-GCCcore-10.3.0.eb b/Golden_Repo/g/giflib/giflib-5.2.1-GCCcore-10.3.0.eb deleted file mode 100644 index ee9ca9cdfd3968002186ebaabfeeaa888ee5cc76..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/giflib/giflib-5.2.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'giflib' -version = '5.2.1' - -homepage = 'http://giflib.sourceforge.net/' -description = """giflib is a library for reading and writing gif images. -It is API and ABI compatible with libungif which was in wide use while -the LZW compression algorithm was patented.""" - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['31da5562f44c5f15d63340a09a4fd62b48c45620cd302f77a6d9acf0077879bd'] - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -builddependencies = [('binutils', '2.36.1')] - -site_contacts = 'a.strube@fz-juelich.de' - -skipsteps = ['configure'] - -installopts = 'PREFIX=%(installdir)s' - -sanity_check_paths = { - 'files': ['bin/giftool'], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/g/giflib/giflib-5.2.1-GCCcore-9.3.0.eb b/Golden_Repo/g/giflib/giflib-5.2.1-GCCcore-9.3.0.eb deleted file mode 100644 index 84a72a4ac42ac106fdd6a3355ee7a7b834735ea3..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/giflib/giflib-5.2.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'giflib' -version = '5.2.1' - -homepage = 'http://giflib.sourceforge.net/' -description = """giflib is a library for reading and writing gif images. 
-It is API and ABI compatible with libungif which was in wide use while -the LZW compression algorithm was patented.""" - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['31da5562f44c5f15d63340a09a4fd62b48c45620cd302f77a6d9acf0077879bd'] - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -builddependencies = [('binutils', '2.34')] - -site_contacts = 'a.strube@fz-juelich.de' - -skipsteps = ['configure'] - -installopts = 'PREFIX=%(installdir)s' - -sanity_check_paths = { - 'files': ['bin/giftool'], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/g/git-lfs/git-lfs-2.12.0.eb b/Golden_Repo/g/git-lfs/git-lfs-2.12.0.eb deleted file mode 100644 index cb3a19eb5b1a0a4f059cc281b6f03e2eda3be1fc..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/git-lfs/git-lfs-2.12.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'MakeCp' - -name = 'git-lfs' -version = '2.12.0' - -homepage = 'https://git-lfs.github.com' -description = """Git Large File Storage (LFS) replaces large files such as audio - samples, videos, datasets, and graphics with text pointers inside Git, while - storing the file contents on a remote server like GitHub.com""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://github.com/git-lfs/git-lfs/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [('Go', '1.15.3')] - -files_to_copy = [(['bin/%(name)s'], 'bin')] - -sanity_check_paths = { - 'files': ['bin/git-lfs'], - 'dirs': [], -} - -sanity_check_commands = [('git-lfs', '--version')] - -moduleclass = 'tools' diff --git a/Golden_Repo/g/git/git-2.28.0-GCCcore-10.3.0.eb b/Golden_Repo/g/git/git-2.28.0-GCCcore-10.3.0.eb deleted file mode 100644 index 4472d92c3c3e62248bcbd8af9fe101dc27576975..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/git/git-2.28.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,56 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# Authors:: Dmitri Gribenko <gribozavr@gmail.com> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the -# policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-90.html -## - -easyblock = 'ConfigureMake' - -name = 'git' -version = '2.28.0' - -homepage = 'http://git-scm.com/' -description = """ -Git is a free and open source distributed version control system designed to -handle everything from small to very large projects with speed and efficiency. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = ['v%(version)s.tar.gz'] -source_urls = ['https://github.com/git/git/archive'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), -] - -dependencies = [ - ('cURL', '7.71.1'), - ('expat', '2.2.9'), - ('gettext', '0.20.2'), - ('Perl', '5.32.0'), -] - -preconfigopts = 'make configure && ' - -# Work around git build system bug. If LIBS contains -lpthread, then configure -# will not append -lpthread to LDFLAGS, but Makefile ignores LIBS. 
-configopts = "--with-perl=${EBROOTPERL}/bin/perl --enable-pthreads='-lpthread'" - -sanity_check_paths = { - 'files': ['bin/git'], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/g/git/git-2.28.0-GCCcore-9.3.0.eb b/Golden_Repo/g/git/git-2.28.0-GCCcore-9.3.0.eb deleted file mode 100644 index b2c76687eef2c940ae4ff2462990e8e2e54f4c88..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/git/git-2.28.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,56 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# Authors:: Dmitri Gribenko <gribozavr@gmail.com> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the -# policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-90.html -## - -easyblock = 'ConfigureMake' - -name = 'git' -version = '2.28.0' - -homepage = 'http://git-scm.com/' -description = """ -Git is a free and open source distributed version control system designed to -handle everything from small to very large projects with speed and efficiency. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = ['v%(version)s.tar.gz'] -source_urls = ['https://github.com/git/git/archive'] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), -] - -dependencies = [ - ('cURL', '7.71.1'), - ('expat', '2.2.9'), - ('gettext', '0.20.2'), - ('Perl', '5.32.0'), -] - -preconfigopts = 'make configure && ' - -# Work around git build system bug. If LIBS contains -lpthread, then configure -# will not append -lpthread to LDFLAGS, but Makefile ignores LIBS. -configopts = "--with-perl=${EBROOTPERL}/bin/perl --enable-pthreads='-lpthread'" - -sanity_check_paths = { - 'files': ['bin/git'], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/g/gnuplot/gnuplot-5.2.8-GCCcore-10.3.0.eb b/Golden_Repo/g/gnuplot/gnuplot-5.2.8-GCCcore-10.3.0.eb deleted file mode 100644 index 25d853cb343a471fef567f58a36f35e484cc66be..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gnuplot/gnuplot-5.2.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,52 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg/Luxembourg Centre for Systems Biomedicine -# Authors:: Fotis Georgatos <fotis.georgatos@uni.lu> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## -easyblock = 'ConfigureMake' - -name = 'gnuplot' -version = '5.2.8' - -homepage = 'http://gnuplot.sourceforge.net/' -description = """Portable interactive, function plotting utility""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'cstd': 'c++11'} - -sources = [SOURCE_TAR_GZ] -source_urls = [('http://sourceforge.net/projects/gnuplot/files', 'download')] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('cairo', '1.17.2'), - ('libjpeg-turbo', '2.0.5'), - ('libreadline', '8.0'), - ('libpng', '1.6.37'), - ('libgd', '2.3.0'), - ('Pango', '1.44.7'), - ('libcerf', '1.13'), - ('Qt5', '5.14.2'), -] - -preconfigopts = 'LIBS="$LIBS -lgio-2.0 -lgobject-2.0" ' -configopts = '--with-qt=qt5 --without-latex --disable-wxwidgets ' - 
-sanity_check_paths = { - 'files': ['bin/gnuplot'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/gnuplot/gnuplot-5.2.8-GCCcore-9.3.0.eb b/Golden_Repo/g/gnuplot/gnuplot-5.2.8-GCCcore-9.3.0.eb deleted file mode 100644 index c9da13df1b5d8abf1fa8039e32e3d52e9407f677..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gnuplot/gnuplot-5.2.8-GCCcore-9.3.0.eb +++ /dev/null @@ -1,52 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg/Luxembourg Centre for Systems Biomedicine -# Authors:: Fotis Georgatos <fotis.georgatos@uni.lu> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## -easyblock = 'ConfigureMake' - -name = 'gnuplot' -version = '5.2.8' - -homepage = 'http://gnuplot.sourceforge.net/' -description = """Portable interactive, function plotting utility""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'cstd': 'c++11'} - -sources = [SOURCE_TAR_GZ] -source_urls = [('http://sourceforge.net/projects/gnuplot/files', 'download')] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('cairo', '1.17.2'), - ('libjpeg-turbo', '2.0.5'), - ('libreadline', '8.0'), - ('libpng', '1.6.37'), - ('libgd', '2.3.0'), - ('Pango', '1.44.7'), - ('libcerf', '1.13'), - ('Qt5', '5.14.2'), -] - -preconfigopts = 'LIBS="$LIBS -lgio-2.0 -lgobject-2.0" ' -configopts = '--with-qt=qt5 --without-latex --disable-wxwidgets ' - -sanity_check_paths = { - 'files': ['bin/gnuplot'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/gomkl/gomkl-2020.eb b/Golden_Repo/g/gomkl/gomkl-2020.eb deleted file mode 100644 index 34c762b1aace35d922855b334c7248e47b10bef3..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gomkl/gomkl-2020.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = "Toolchain" - -name = 'gomkl' -version = '2020' - -homepage = '(none)' -description = """GCC and GFortran based compiler toolchain, ParaStation MPICH variant for MPI support and MKL""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_comp_name = 'GCC' -local_comp_version = '9.3.0' -local_comp = (local_comp_name, local_comp_version) - -# toolchain used to build dependencies -local_comp_mpi_tc_name = 'gompi' -local_comp_mpi_tc_ver = version -local_comp_mpi_tc = (local_comp_mpi_tc_name, local_comp_mpi_tc_ver) - -# compiler toolchain dependencies -dependencies = [ - local_comp, - ('OpenMPI', '4.1.0rc1', '', local_comp), # part of gompi toolchain - ('imkl', '2020.2.254', '', local_comp_mpi_tc), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gomkl/gomkl-2021.eb b/Golden_Repo/g/gomkl/gomkl-2021.eb deleted file mode 100644 index 3521b374e4a5a733fef64a96157e175a0b932241..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gomkl/gomkl-2021.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = "Toolchain" - -name = 'gomkl' -version = '2021' - -homepage = '(none)' -description = """GCC and GFortran based compiler toolchain, ParaStation MPICH variant for MPI support and MKL""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_comp_name = 'GCC' -local_comp_version = '10.3.0' -local_comp = (local_comp_name, local_comp_version) - -# toolchain used to build dependencies -local_comp_mpi_tc_name = 'gompi' -local_comp_mpi_tc_ver = 
version -local_comp_mpi_tc = (local_comp_mpi_tc_name, local_comp_mpi_tc_ver) - -# compiler toolchain dependencies -dependencies = [ - local_comp, - ('OpenMPI', '4.1.1', '', local_comp), # part of gompi toolchain - ('imkl', '2021.2.0', '', local_comp_mpi_tc), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gompi/gompi-2020.eb b/Golden_Repo/g/gompi/gompi-2020.eb deleted file mode 100644 index 589135846b0cd687e26ddb1e06ece101c324a3e2..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gompi/gompi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = "Toolchain" - -name = 'gompi' -version = '2020' - -homepage = '(none)' -description = """gcc and GFortran based compiler toolchain, - including Parastation MPICH2 for MPI support. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_mpilib = 'OpenMPI' -local_mpiver = '4.1.0rc1' - -local_compname = 'GCC' -local_compver = '9.3.0' - -local_comp = (local_compname, local_compver) - -dependencies = [ - local_comp, - (local_mpilib, local_mpiver, '', local_comp), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gompi/gompi-2021.eb b/Golden_Repo/g/gompi/gompi-2021.eb deleted file mode 100644 index c0c8134836491eb22269845cd0b9c271d91276a4..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gompi/gompi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = "Toolchain" - -name = 'gompi' -version = '2021' - -homepage = '(none)' -description = """gcc and GFortran based compiler toolchain, - including Parastation MPICH2 for MPI support. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_mpilib = 'OpenMPI' -local_mpiver = '4.1.1' - -local_compname = 'GCC' -local_compver = '10.3.0' - -local_comp = (local_compname, local_compver) - -dependencies = [ - local_comp, - (local_mpilib, local_mpiver, '', local_comp), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gperf/gperf-3.1-GCCcore-10.3.0.eb b/Golden_Repo/g/gperf/gperf-3.1-GCCcore-10.3.0.eb deleted file mode 100644 index d69d69e3442ed1765a4a97d332ade28c026e107d..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gperf/gperf-3.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gperf' -version = '3.1' - -homepage = 'http://www.gnu.org/software/gperf/' -description = """GNU gperf is a perfect hash function generator. For a given list of strings, it produces a hash - function and hash table, in form of C or C++ code, for looking up a value depending on the input string. The hash - function is perfect, which means that the hash table has no collisions, and the hash table lookup needs a single - string comparison only. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -sanity_check_paths = { - 'files': ['bin/gperf'], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/gperf/gperf-3.1-GCCcore-9.3.0.eb b/Golden_Repo/g/gperf/gperf-3.1-GCCcore-9.3.0.eb deleted file mode 100644 index 203375757e27bf04dc7fce0f2af97301d89b535d..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gperf/gperf-3.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gperf' -version = '3.1' - -homepage = 'http://www.gnu.org/software/gperf/' -description = """GNU gperf is a perfect hash function generator. 
For a given list of strings, it produces a hash - function and hash table, in form of C or C++ code, for looking up a value depending on the input string. The hash - function is perfect, which means that the hash table has no collisions, and the hash table lookup needs a single - string comparison only. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -sanity_check_paths = { - 'files': ['bin/gperf'], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/gpsmkl/gpsmkl-2020.eb b/Golden_Repo/g/gpsmkl/gpsmkl-2020.eb deleted file mode 100644 index e1d474102c36f1ca4a5104d2016f9caedbff2141..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gpsmkl/gpsmkl-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = "Toolchain" - -name = 'gpsmkl' -version = '2020' - -homepage = '(none)' -description = 'GCC and GFortran based compiler toolchain, ParaStation MPICH variant for MPI support and MKL' - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('GCC', '9.3.0') - -# toolchain used to build dependencies -local_comp_mpi_tc = ('gpsmpi', version) - -# compiler toolchain dependencies -dependencies = [ - local_compiler, - ('psmpi', '5.4.7-1', '', local_compiler), # part of gpsmpi toolchain - ('imkl', '2020.2.254', '', local_comp_mpi_tc), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gpsmkl/gpsmkl-2021.eb b/Golden_Repo/g/gpsmkl/gpsmkl-2021.eb deleted file mode 100644 index fc53914f12721a9dbaad9d3c5a194a949afef334..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gpsmkl/gpsmkl-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = "Toolchain" - -name = 'gpsmkl' -version = '2021' - -homepage = '(none)' -description = 'GCC and GFortran based compiler toolchain, ParaStation MPICH variant for MPI support and MKL' - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('GCC', '10.3.0') - -# toolchain used to build dependencies -local_comp_mpi_tc = ('gpsmpi', version) - -# compiler toolchain dependencies -dependencies = [ - local_compiler, - ('psmpi', '5.4.9-1', '', local_compiler), # part of gpsmpi toolchain - ('imkl', '2021.2.0', '', local_comp_mpi_tc), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gpsmpi/gpsmpi-2020.eb b/Golden_Repo/g/gpsmpi/gpsmpi-2020.eb deleted file mode 100644 index 509c3db2a392c3147dbab0b8164ba66b0bfeaa20..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gpsmpi/gpsmpi-2020.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = 'Toolchain' - -name = 'gpsmpi' -version = '2020' - -homepage = '(none)' -description = 'GCC and GFortran based compiler toolchain, including Parastation MPICH2 for MPI support.' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('GCC', '9.3.0') - -dependencies = [ - local_compiler, - ('psmpi', '5.4.7-1', '', local_compiler), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gpsmpi/gpsmpi-2021.eb b/Golden_Repo/g/gpsmpi/gpsmpi-2021.eb deleted file mode 100644 index c350ae4fde936cb4ebc90d8159294c18fe601fe6..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gpsmpi/gpsmpi-2021.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = 'Toolchain' - -name = 'gpsmpi' -version = '2021' - -homepage = '(none)' -description = 'GCC and GFortran based compiler toolchain, including Parastation MPICH2 for MPI support.' 
- -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('GCC', '10.3.0') - -dependencies = [ - local_compiler, - ('psmpi', '5.4.9-1', '', local_compiler), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/g/gsettings-desktop-schemas/gsettings-desktop-schemas-3.34.0-GCCcore-10.3.0.eb b/Golden_Repo/g/gsettings-desktop-schemas/gsettings-desktop-schemas-3.34.0-GCCcore-10.3.0.eb deleted file mode 100644 index 283eb30f5daf6009974402ddf9c3f514a02c6e72..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gsettings-desktop-schemas/gsettings-desktop-schemas-3.34.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'gsettings-desktop-schemas' -version = '3.34.0' - -homepage = 'https://github.com/GNOME/gsettings-desktop-schemas' -description = ''' -gsettings-desktop-schemas contains a collection of GSettings schemas for settings shared by various components of a -desktop. -''' - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), -] - -dependencies = [ - ('GTK+', '3.24.17'), -] - -configopts = '-Ddefault_library=both' - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['include/gsettings-desktop-schemas/gdesktop-enums.h'], - 'dirs': ['share'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/gsettings-desktop-schemas/gsettings-desktop-schemas-3.34.0-GCCcore-9.3.0.eb b/Golden_Repo/g/gsettings-desktop-schemas/gsettings-desktop-schemas-3.34.0-GCCcore-9.3.0.eb deleted file mode 100644 index 9b9e1d6c91a190ccb7d5b5d49254710f9b8587d7..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gsettings-desktop-schemas/gsettings-desktop-schemas-3.34.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'gsettings-desktop-schemas' -version = '3.34.0' - -homepage = 'https://github.com/GNOME/gsettings-desktop-schemas' -description = ''' -gsettings-desktop-schemas contains a collection of GSettings schemas for settings shared by various components of a -desktop. 
-''' - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), -] - -dependencies = [ - ('GTK+', '3.24.17'), -] - -configopts = '-Ddefault_library=both' - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['include/gsettings-desktop-schemas/gdesktop-enums.h'], - 'dirs': ['share'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/g/guile/guile-2.0.14-GCCcore-10.3.0.eb b/Golden_Repo/g/guile/guile-2.0.14-GCCcore-10.3.0.eb deleted file mode 100644 index 7a97f7fdd3189c167e774722a88979d05169ffc9..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/guile/guile-2.0.14-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'guile' -version = '2.0.14' - -homepage = 'http://www.gnu.org/software/guile' -description = """Guile is the GNU Ubiquitous Intelligent Language for Extensions, the official extension language for -the GNU operating system. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('libtool', '2.4.6'), - ('GMP', '6.2.0'), - ('libunistring', '0.9.10'), - ('libffi', '3.3'), - ('libreadline', '8.0'), - ('XZ', '5.2.5'), - ('gc', '8.0.2'), -] - -configopts = " --enable-error-on-warning=no" - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["guile", 'guile-config', 'guile-snarf', 'guile-tools']] + - ["lib/libguile-%(version_major_minor)s.a", "include/guile/%(version_major_minor)s/libguile.h"], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/guile/guile-2.0.14-GCCcore-9.3.0.eb b/Golden_Repo/g/guile/guile-2.0.14-GCCcore-9.3.0.eb deleted file mode 100644 index e09782f5a6ea107bb8f675f11b4e3a657f49df5e..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/guile/guile-2.0.14-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'guile' -version = '2.0.14' - -homepage = 'http://www.gnu.org/software/guile' -description = """Guile is the GNU Ubiquitous Intelligent Language for Extensions, the official extension language for -the GNU operating system. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('libtool', '2.4.6'), - ('GMP', '6.2.0'), - ('libunistring', '0.9.10'), - ('libffi', '3.3'), - ('libreadline', '8.0'), - ('XZ', '5.2.5'), - ('gc', '8.0.2'), -] - -configopts = " --enable-error-on-warning=no" - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["guile", 'guile-config', 'guile-snarf', 'guile-tools']] + - ["lib/libguile-%(version_major_minor)s.a", "include/guile/%(version_major_minor)s/libguile.h"], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/guile/guile-2.2.4-GCCcore-10.3.0.eb b/Golden_Repo/g/guile/guile-2.2.4-GCCcore-10.3.0.eb deleted file mode 100644 index a3e5bf86e2bd0e46e04c3c8d56a3b28f86f22973..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/guile/guile-2.2.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'guile' -version = '2.2.4' - -homepage = 'http://www.gnu.org/software/guile' -description = """Guile is the GNU Ubiquitous Intelligent Language for Extensions, the official extension language for -the GNU operating system. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('libtool', '2.4.6'), - ('GMP', '6.2.0'), - ('libunistring', '0.9.10'), - ('libffi', '3.3'), - ('libreadline', '8.0'), - ('XZ', '5.2.5'), - ('gc', '8.0.2'), -] - -configopts = " --enable-error-on-warning=no" - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["guile", 'guile-config', 'guile-snarf', 'guile-tools']] + - ["lib/libguile-%(version_major_minor)s.a", "include/guile/%(version_major_minor)s/libguile.h"], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/guile/guile-2.2.4-GCCcore-9.3.0.eb b/Golden_Repo/g/guile/guile-2.2.4-GCCcore-9.3.0.eb deleted file mode 100644 index 3ec807f7b41fd75a6a66562a9771dbd9a52c95f3..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/guile/guile-2.2.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'guile' -version = '2.2.4' - -homepage = 'http://www.gnu.org/software/guile' -description = """Guile is the GNU Ubiquitous Intelligent Language for Extensions, the official extension language for -the GNU operating system. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('libtool', '2.4.6'), - ('GMP', '6.2.0'), - ('libunistring', '0.9.10'), - ('libffi', '3.3'), - ('libreadline', '8.0'), - ('XZ', '5.2.5'), - ('gc', '8.0.2'), -] - -configopts = " --enable-error-on-warning=no" - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["guile", 'guile-config', 'guile-snarf', 'guile-tools']] + - ["lib/libguile-%(version_major_minor)s.a", "include/guile/%(version_major_minor)s/libguile.h"], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/g/gzip/gzip-1.10-GCCcore-10.3.0.eb b/Golden_Repo/g/gzip/gzip-1.10-GCCcore-10.3.0.eb deleted file mode 100644 index 4be9c41d3625266ca77bf12bc70e172244ece9cd..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gzip/gzip-1.10-GCCcore-10.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gzip' -version = '1.10' - -homepage = 'http://www.gnu.org/software/gzip/' -description = """gzip (GNU zip) is a popular data compression program as a -replacement for compress""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [('binutils', '2.36.1')] - -sanity_check_paths = { - 'files': ["bin/gunzip", "bin/gzip", "bin/uncompress"], - 'dirs': [], -} - -sanity_check_commands = [True, ('gzip', '--version')] - -moduleclass = 'tools' diff --git a/Golden_Repo/g/gzip/gzip-1.10-GCCcore-9.3.0.eb b/Golden_Repo/g/gzip/gzip-1.10-GCCcore-9.3.0.eb deleted file mode 100644 index 23ec903c4439c4e0a8ca5563e1e2a97c566c1791..0000000000000000000000000000000000000000 --- a/Golden_Repo/g/gzip/gzip-1.10-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'gzip' -version = '1.10' - -homepage = 'http://www.gnu.org/software/gzip/' -description = """gzip (GNU zip) is a popular data compression program as a -replacement for compress""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [('binutils', '2.34')] - -sanity_check_paths = { - 'files': ["bin/gunzip", "bin/gzip", "bin/uncompress"], - 'dirs': [], -} - -sanity_check_commands = [True, ('gzip', '--version')] - -moduleclass = 'tools' diff --git a/Golden_Repo/h/HDF/HDF-4.2.15-GCC-9.3.0.eb b/Golden_Repo/h/HDF/HDF-4.2.15-GCC-9.3.0.eb deleted file mode 100644 index 6d7aa30ea9f519149816b1dddc4a2b38df2dabe4..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF/HDF-4.2.15-GCC-9.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HDF' -version = '4.2.15' - -homepage = 'http://www.hdfgroup.org/products/hdf4/' -description = """HDF (also known as HDF4) is a library and multi-object file format for storing - and managing data between machines. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('Java', '15', '', SYSTEM), -] - -dependencies = [ - ('Szip', '2.1.1'), - ('zlib', '1.2.11'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), -] - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.split('-')[0]] - -preconfigopts = 'export CPATH="/usr/include/tirpc:$CPATH" && ' -preconfigopts += 'export LIBS="-ltirpc $LIBS" && ' -configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java ' -configopts += '--includedir=%(installdir)s/include/%(namelower)s ' - -prebuildopts = 'export CPATH="/usr/include/tirpc:$CPATH" && ' - -sanity_check_paths = { - 'files': ['lib/libdf.a', 'lib/libhdf4.settings', 'lib/libmfhdf.a'], - 'dirs': ['bin', 'include/hdf'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF/HDF-4.2.15-GCCcore-10.3.0.eb b/Golden_Repo/h/HDF/HDF-4.2.15-GCCcore-10.3.0.eb deleted file mode 100644 index b89bc187389d703a8a56abd6d97703124c6b1c04..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF/HDF-4.2.15-GCCcore-10.3.0.eb +++ /dev/null @@ -1,69 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HDF' -version = '4.2.15' - -homepage = 'https://www.hdfgroup.org/products/hdf4/' - -description = """ - HDF (also known as HDF4) is a library and multi-object file format for - storing and managing data between machines. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%(version)s/src/'] -sources = [SOURCELOWER_TAR_GZ] -patches = ['HDF-4.2.15_fix-aarch64.patch'] -checksums = [ - 'dbeeef525af7c2d01539906c28953f0fdab7dba603d1bc1ec4a5af60d002c459', # hdf-4.2.15.tar.gz - '1b4341e309cccefc6ea4310c8f8b08cc3dfe1fa9609b7fa7aee80e4dac598473', # HDF-4.2.15_fix-aarch64.patch -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Bison', '3.7.6'), - ('flex', '2.6.4'), - ('Java', '15', '', SYSTEM), -] - -dependencies = [ - ('libjpeg-turbo', '2.0.5'), - ('Szip', '2.1.1'), - ('zlib', '1.2.11'), - ('JasPer', '2.0.19'), - ('libtirpc', '1.3.2'), -] - -preconfigopts = "LIBS='-ltirpc' " - -local_common_configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB ' -local_common_configopts += 'CFLAGS="$CFLAGS -I$EBROOTLIBTIRPC/include/tirpc" ' -local_common_configopts += '--includedir=%(installdir)s/include/%(namelower)s ' -local_common_configopts += '--enable-java ' -configopts = [ - # -fallow-argument-mismatch is required to compile with GCC 10.x - local_common_configopts + 'FFLAGS="$FFLAGS -fallow-argument-mismatch"', - # Cannot build shared libraries and Fortran... 
- # https://trac.osgeo.org/gdal/wiki/HDF#IncompatibilitywithNetCDFLibraries - # netcdf must be disabled to allow HDF to be used by GDAL - local_common_configopts + "--enable-shared --disable-fortran --disable-netcdf", -] - -modextrapaths = {'CPATH': 'include/hdf'} - -sanity_check_paths = { - 'files': ['bin/h4cc', 'bin/ncdump', 'lib/libdf.a', 'lib/libhdf4.settings', 'lib/libmfhdf.a', - 'lib/libmfhdf.%s' % SHLIB_EXT], - 'dirs': ['include/hdf'], -} - -sanity_check_commands = [ - "h4cc --help", - "ncdump -V", -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 437dc04eaf54f04616c25eda39085499a44fe6ed..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HDF' -version = '4.2.15' - -homepage = 'http://www.hdfgroup.org/products/hdf4/' -description = """HDF (also known as HDF4) is a library and multi-object file format for storing - and managing data between machines. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('Java', '15', '', SYSTEM), -] - -dependencies = [ - ('Szip', '2.1.1'), - ('zlib', '1.2.11'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), -] - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.split('-')[0]] - -preconfigopts = 'export CPATH="/usr/include/tirpc:$CPATH" && ' -preconfigopts += 'export LIBS="-ltirpc $LIBS" && ' -configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java ' -configopts += '--includedir=%(installdir)s/include/%(namelower)s ' - -prebuildopts = 'export CPATH="/usr/include/tirpc:$CPATH" && ' - -sanity_check_paths = { - 'files': ['lib/libdf.a', 'lib/libhdf4.settings', 'lib/libmfhdf.a'], - 'dirs': ['bin', 'include/hdf'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-GCCcore-10.3.0-serial.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-GCCcore-10.3.0-serial.eb deleted file mode 100644 index af96b4bc27ac0e376d248e85126e7017221cfa56..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-GCCcore-10.3.0-serial.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'HDF5' -version = '1.10.6' -versionsuffix = '-serial' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'optarch': True, 'pic': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Java', '15', '', SYSTEM) -] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -configopts = '--enable-java' - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-GCCcore-9.3.0-serial.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-GCCcore-9.3.0-serial.eb deleted file mode 100644 index 4e9795c14cfea61c371e6345370c9b51db261870..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-GCCcore-9.3.0-serial.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'HDF5' -version = '1.10.6' -versionsuffix = '-serial' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'optarch': True, 'pic': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -builddependencies = [ - ('binutils', '2.34'), - ('Java', '15', '', SYSTEM) -] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -configopts = '--enable-java' - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-gompi-2020.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-gompi-2020.eb deleted file mode 100644 index 233bd2e00706d44adb945dd7d5e8322bf6b2442f..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-gompi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-gompi-2021.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-gompi-2021.eb deleted file mode 100644 index 37ac78baad92042f84654810a05faa2ed8689d68..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-gompi-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-gpsmpi-2020.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-gpsmpi-2020.eb deleted file mode 100644 index 336a4ded69493ddf2c60dedeed0a033db28fbe54..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-gpsmpi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-gpsmpi-2021.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-gpsmpi-2021.eb deleted file mode 100644 index 4e8c7dd0f195ffef627a2e3187bed31d31d34eaf..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-gpsmpi-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-iimpi-2020.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-iimpi-2020.eb deleted file mode 100644 index 0abd8acda588265cac6c56e0b1339c21e6d4ec00..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-iimpi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-iimpi-2021.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-iimpi-2021.eb deleted file mode 100644 index a40a05b103079f05330830ec53debf002b75dd0a..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-iimpi-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-iompi-2020.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-iompi-2020.eb deleted file mode 100644 index a534a2fcd5bf37f1ea4e6e0a80427e2d9f1eb318..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-iompi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-iompi-2021.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-iompi-2021.eb deleted file mode 100644 index 15f788ab4589c6ab46f6b742e4b279e8a63116e2..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-iompi-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2020-mt.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2020-mt.eb deleted file mode 100644 index 0853cd5096a183fa24f8ce6020093643d4214d84..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb deleted file mode 100644 index 6d5fe53a3d7252e12d1b3ff030efbbdfe81d69ac..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb deleted file mode 100644 index 7f681cce92fc12d4bd27be2c31572a7b0152feb0..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2020.1.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2020.1.eb deleted file mode 100644 index c8bae61d1e07502428d0e88043c90d582f970abe..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2020.1.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2020.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2020.eb deleted file mode 100644 index c5e190e8281addb4fdc905158b189365f3014ed8..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2021.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2021.eb deleted file mode 100644 index 86aef3a41d6f599674f3610117fa1f8ed6877d78..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-npsmpic-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDF5/HDF5-1.10.6-nvompic-2021.eb b/Golden_Repo/h/HDF5/HDF5-1.10.6-nvompic-2021.eb deleted file mode 100644 index 6276c2322a67c7beaba98d7bcda59cb2904b5942..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDF5/HDF5-1.10.6-nvompic-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'nvompic', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/h/HDFView/HDFView-2.14-GCC-9.3.0.eb b/Golden_Repo/h/HDFView/HDFView-2.14-GCC-9.3.0.eb deleted file mode 100644 index 1b24781cf8f543eff5bafa69cde65b7c5a7e8d58..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDFView/HDFView-2.14-GCC-9.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'PackedBinary' - -name = 'HDFView' -version = '3.1.1' - -homepage = 'https://www.hdfgroup.org/products/java/' -description = """The HDF Java Products consist of HDFView (a Java browser for HDF4 and HDF5 files) and the HDF-Java wrappers. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['https://support.hdfgroup.org/ftp/HDF5/releases/HDF-JAVA/hdfview-%(version)s/src/'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('ant', '1.10.9', '-Java-%(javaver)s', SYSTEM), -] - -dependencies = [ - ('Java', '15', '', SYSTEM), - ('HDF', '4.2.15'), - ('HDF5', '1.10.6', '-serial'), -] - -install_cmd = ( - # build HDFView - 'export HDFLIBS=$EBROOTHDF && ' - 'export HDF5LIBS=$EBROOTHDF5 && ' - 'cd %(builddir)s/hdfview-%(version)s && ' - 'ant run && ' - 'ant package && ' - 'tar -xzf build/dist/HDFView-%(version)s-Linux-x86_64.tar.gz && ' - # install HDFView - 'cd %(installdir)s && ' - '%(builddir)s/hdfview-%(version)s/build/dist/HDFView-%(version)s-Linux.sh --skip-license && ' - 'mv %(installdir)s/HDFView/%(version)s/* %(installdir)s && ' - 'mkdir %(installdir)s/bin && ' - 'mv %(installdir)s/hdfview.sh %(installdir)s/bin/ && ' - 'ln -s %(installdir)s/bin/hdfview.sh %(installdir)s/bin/hdfview && ' - "sed -i 's#JAVABIN=.*#JAVABIN=$JAVA_HOME/bin#g' %(installdir)s/bin/hdfview.sh && " - 'sed -i "s#INSTALLDIR=.*#INSTALLDIR=%(installdir)s#g" %(installdir)s/bin/hdfview.sh ' -) - -moduleclass = 'vis' diff --git a/Golden_Repo/h/HDFView/HDFView-3.1.3-GCCcore-10.3.0.eb b/Golden_Repo/h/HDFView/HDFView-3.1.3-GCCcore-10.3.0.eb deleted file mode 100644 index c340fd0bb39272c86c4589378a969f68fd9c5c02..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HDFView/HDFView-3.1.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'PackedBinary' - -name = 'HDFView' -version = '3.1.3' - -homepage = 'https://www.hdfgroup.org/products/java/' -description = """The HDF Java Products consist of HDFView (a Java browser for HDF4 and HDF5 files) and the HDF-Java wrappers. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://support.hdfgroup.org/ftp/HDF5/releases/HDF-JAVA/hdfview-%(version)s/src/'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('ant', '1.10.9', '-Java-%(javaver)s', SYSTEM), -] - -dependencies = [ - ('Java', '15', '', SYSTEM), - ('HDF', '4.2.15'), - ('HDF5', '1.10.6', '-serial'), -] - -install_cmd = ( - # build HDFView - 'export HDFLIBS=$EBROOTHDF && ' - 'export HDF5LIBS=$EBROOTHDF5 && ' - 'cd %(builddir)s/hdfview-%(version)s && ' - 'ant deploy && ' - 'ant binaryPack && ' - # install HDFView - 'cd %(installdir)s && ' - 'cp -a %(builddir)s/hdfview-%(version)s/build/dist/HDFView/* . && ' - 'ln -s %(installdir)s/lib/app/hdfview.sh %(installdir)s/bin/ && ' - "sed -i 's#JAVABIN=.*#JAVABIN=$JAVA_HOME/bin#g' %(installdir)s/bin/hdfview.sh && " - 'sed -i "s#INSTALLDIR=.*#INSTALLDIR=%(installdir)s#g" %(installdir)s/bin/hdfview.sh ' -) - -moduleclass = 'vis' diff --git a/Golden_Repo/h/HarfBuzz/HarfBuzz-2.6.7-GCCcore-10.3.0.eb b/Golden_Repo/h/HarfBuzz/HarfBuzz-2.6.7-GCCcore-10.3.0.eb deleted file mode 100644 index 63115b127ee218586e2334ffdb92e9bb79c3fc49..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HarfBuzz/HarfBuzz-2.6.7-GCCcore-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HarfBuzz' -version = '2.6.7' - -homepage = 'http://www.freedesktop.org/wiki/Software/HarfBuzz' -description = """HarfBuzz is an OpenType text shaping engine. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://www.freedesktop.org/software/harfbuzz/release/'] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1') -] - -dependencies = [ - ('X11', '20200222'), - ('GLib', '2.64.4'), - ('cairo', '1.17.2'), - ('freetype', '2.10.1'), -] - -configopts = "--enable-introspection=yes --with-gobject=yes --enable-static --enable-shared --with-cairo " - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib64/libharfbuzz.%s' % SHLIB_EXT, 'bin/hb-view'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/h/HarfBuzz/HarfBuzz-2.6.7-GCCcore-9.3.0.eb b/Golden_Repo/h/HarfBuzz/HarfBuzz-2.6.7-GCCcore-9.3.0.eb deleted file mode 100644 index c15e8b1be0eece7e37943e0a6da6a56e9eefc951..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/HarfBuzz/HarfBuzz-2.6.7-GCCcore-9.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HarfBuzz' -version = '2.6.7' - -homepage = 'http://www.freedesktop.org/wiki/Software/HarfBuzz' -description = """HarfBuzz is an OpenType text shaping engine. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://www.freedesktop.org/software/harfbuzz/release/'] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1') -] - -dependencies = [ - ('X11', '20200222'), - ('GLib', '2.64.4'), - ('cairo', '1.17.2'), - ('freetype', '2.10.1'), -] - -configopts = "--enable-introspection=yes --with-gobject=yes --enable-static --enable-shared --with-cairo " - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -sanity_check_paths = { - 'files': ['lib64/libharfbuzz.%s' % SHLIB_EXT, 'bin/hb-view'], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-2020.eb b/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-2020.eb deleted file mode 100644 index 4b2e45b52f382c84bd3655125d975a2979680fa6..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-2020.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Harminv' -version = '1.4.1' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/Harminv' -description = """ -Harminv is a free program (and accompanying library) to solve the problem of harmonic inversion - given a discrete-time, -finite-length signal that consists of a sum of finitely-many sinusoids (possibly exponentially decaying) in a given -bandwidth, it determines the frequencies, decay constants, amplitudes, and phases of those sinusoids. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'optarch': True, 'pic': True, 'cstd': 'c99'} - -source_urls = ['https://github.com/stevengj/harminv/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic --with-blas=mkl_em64t --with-lapack=mkl_em64t --enable-shared" - -moduleclass = 'math' diff --git a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-2021.eb b/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-2021.eb deleted file mode 100644 index 6ec8d71b8638609109a1df132d47d4ce5258ac45..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Harminv' -version = '1.4.1' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/Harminv' -description = """ -Harminv is a free program (and accompanying library) to solve the problem of harmonic inversion - given a discrete-time, -finite-length signal that consists of a sum of finitely-many sinusoids (possibly exponentially decaying) in a given -bandwidth, it determines the frequencies, decay constants, amplitudes, and phases of those sinusoids. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'optarch': True, 'pic': True, 'cstd': 'c99'} - -source_urls = ['https://github.com/stevengj/harminv/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic --with-blas=mkl_em64t --with-lapack=mkl_em64t --enable-shared" - -moduleclass = 'math' diff --git a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-para-2020.eb b/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-para-2020.eb deleted file mode 100644 index e97ff3717e4e6691b53e017a29b1a029962b3465..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-para-2020.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Harminv' -version = '1.4.1' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/Harminv' -description = """ -Harminv is a free program (and accompanying library) to solve the problem of harmonic inversion - given a discrete-time, -finite-length signal that consists of a sum of finitely-many sinusoids (possibly exponentially decaying) in a given -bandwidth, it determines the frequencies, decay constants, amplitudes, and phases of those sinusoids. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'optarch': True, 'pic': True, 'cstd': 'c99'} - -source_urls = ['https://github.com/stevengj/harminv/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic --with-blas=mkl_em64t --with-lapack=mkl_em64t --enable-shared" - -moduleclass = 'math' diff --git a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-para-2021.eb b/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-para-2021.eb deleted file mode 100644 index 7c76c3f764ae9b58568ad009893c33f05e4d0000..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Harminv/Harminv-1.4.1-intel-para-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Harminv' -version = '1.4.1' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/Harminv' -description = """ -Harminv is a free program (and accompanying library) to solve the problem of harmonic inversion - given a discrete-time, -finite-length signal that consists of a sum of finitely-many sinusoids (possibly exponentially decaying) in a given -bandwidth, it determines the frequencies, decay constants, amplitudes, and phases of those sinusoids. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'optarch': True, 'pic': True, 'cstd': 'c99'} - -source_urls = ['https://github.com/stevengj/harminv/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic --with-blas=mkl_em64t --with-lapack=mkl_em64t --enable-shared" - -moduleclass = 'math' diff --git a/Golden_Repo/h/Harminv/Harminv-1.4.1-iomkl-2020.eb b/Golden_Repo/h/Harminv/Harminv-1.4.1-iomkl-2020.eb deleted file mode 100644 index f5171666628088f01ef639401a0777866260f3d5..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Harminv/Harminv-1.4.1-iomkl-2020.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Harminv' -version = '1.4.1' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/Harminv' -description = """ -Harminv is a free program (and accompanying library) to solve the problem of harmonic inversion - given a discrete-time, -finite-length signal that consists of a sum of finitely-many sinusoids (possibly exponentially decaying) in a given -bandwidth, it determines the frequencies, decay constants, amplitudes, and phases of those sinusoids. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'optarch': True, 'pic': True, 'cstd': 'c99'} - -source_urls = ['https://github.com/stevengj/harminv/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic --with-blas=mkl_em64t --with-lapack=mkl_em64t --enable-shared" - -moduleclass = 'math' diff --git a/Golden_Repo/h/Harminv/Harminv-1.4.1-iomkl-2021.eb b/Golden_Repo/h/Harminv/Harminv-1.4.1-iomkl-2021.eb deleted file mode 100644 index f7b6d5ed5a90e67ec6c87036aa01e2463f30c098..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Harminv/Harminv-1.4.1-iomkl-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Harminv' -version = '1.4.1' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/Harminv' -description = """ -Harminv is a free program (and accompanying library) to solve the problem of harmonic inversion - given a discrete-time, -finite-length signal that consists of a sum of finitely-many sinusoids (possibly exponentially decaying) in a given -bandwidth, it determines the frequencies, decay constants, amplitudes, and phases of those sinusoids. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'optarch': True, 'pic': True, 'cstd': 'c99'} - -source_urls = ['https://github.com/stevengj/harminv/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -configopts = "--with-pic --with-blas=mkl_em64t --with-lapack=mkl_em64t --enable-shared" - -moduleclass = 'math' diff --git a/Golden_Repo/h/Horovod/Horovod-0.20.3-gomkl-2020-Python-3.8.5.eb b/Golden_Repo/h/Horovod/Horovod-0.20.3-gomkl-2020-Python-3.8.5.eb deleted file mode 100644 index 5f3d9fc6fdf60bf759e5bb4aa7c3a98eb7d8bed6..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Horovod/Horovod-0.20.3-gomkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,101 +0,0 @@ -# on juwels booster, one needs to call "export UCX_LOG_LEVEL=FATAL ebw ... - -easyblock = 'PythonBundle' - -name = 'Horovod' -version = '0.20.3' -local_tf_version = '2.3.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/uber/horovod' -description = "Horovod is a distributed training framework for TensorFlow and PyTorch." - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -site_contacts = 'a.strube@fz-juelich.de' - -builddependencies = [ - ('CMake', '3.18.0'), -] -dependencies = [ - ('Python', '3.8.5'), - ('TensorFlow', local_tf_version, '-Python-%(pyver)s', ('gcccoremkl', '9.3.0-2020.2.254')), - ('UCX', '1.9.0', '', SYSTEM), # Forced because default was 1.8.1. If the default is 1.9.0, this can go away - ('PyTorch', '1.7.0', '-Python-%(pyver)s', ('gcccoremkl', '9.3.0-2020.2.254')), - ('NCCL', '2.8.3-1', '-CUDA-11.0'), -] - -use_pip = True -sanity_pip_check = True - -# possible vars: -# HOROVOD_BUILD_ARCH_FLAGS - additional C++ compilation flags to pass in for your build architecture. -# HOROVOD_CUDA_HOME - path where CUDA include and lib directories can be found. -# HOROVOD_BUILD_CUDA_CC_LIST - List of compute capabilities to build Horovod CUDA -# kernels for (example: HOROVOD_BUILD_CUDA_CC_LIST=60,70,75) -# HOROVOD_ROCM_HOME - path where ROCm include and lib directories can be found. -# HOROVOD_NCCL_HOME - path where NCCL include and lib directories can be found. -# HOROVOD_NCCL_INCLUDE - path to NCCL include directory. 
-# HOROVOD_NCCL_LIB - path to NCCL lib directory. -# HOROVOD_NCCL_LINK - {SHARED, STATIC}. Mode to link NCCL library. Defaults to STATIC for CUDA, SHARED for ROCm. -# HOROVOD_WITH_GLOO - {1}. Require that Horovod is built with Gloo support enabled. -# HOROVOD_WITHOUT_GLOO - {1}. Skip building with Gloo support. -# HOROVOD_WITH_MPI - {1}. Require that Horovod is built with MPI support enabled. -# HOROVOD_WITHOUT_MPI - {1}. Skip building with MPI support. -# HOROVOD_GPU - {CUDA, ROCM}. Framework to use for GPU operations. -# HOROVOD_GPU_OPERATIONS - {NCCL, MPI}. Framework to use for GPU tensor allreduce, allgather, and broadcast. -# HOROVOD_GPU_ALLREDUCE - {NCCL, MPI}. Framework to use for GPU tensor allreduce. -# HOROVOD_GPU_ALLGATHER - {NCCL, MPI}. Framework to use for GPU tensor allgather. -# HOROVOD_GPU_BROADCAST - {NCCL, MPI}. Framework to use for GPU tensor broadcast. -# HOROVOD_ALLOW_MIXED_GPU_IMPL - {1}. Allow Horovod to install with NCCL allreduce and MPI GPU allgather / -# broadcast. Not recommended due to a possible deadlock. -# HOROVOD_CPU_OPERATIONS - {MPI, GLOO, CCL}. Framework to use for CPU tensor allreduce, allgather, and broadcast. -# HOROVOD_CMAKE - path to the CMake binary used to build Gloo (not required when using MPI). -# HOROVOD_WITH_TENSORFLOW - {1}. Require Horovod to install with TensorFlow support enabled. -# HOROVOD_WITHOUT_TENSORFLOW - {1}. Skip installing TensorFlow support. -# HOROVOD_WITH_PYTORCH - {1}. Require Horovod to install with PyTorch support enabled. -# HOROVOD_WITHOUT_PYTORCH - {1}. Skip installing PyTorch support. -# HOROVOD_WITH_MXNET - {1}. Require Horovod to install with MXNet support enabled. -# HOROVOD_WITHOUT_MXNET - {1}. Skip installing MXNet support. - -# prebuildopts = 'export LDSHARED="$CC -shared" && ' -# prebuildopts += ' HOROVOD_WITH_TENSORFLOW=1 HOROVOD_WITHOUT_PYTORCH=1 HOROVOD_WITHOUT_MXNET=1 ' -# prebuildopts += ' HOROVOD_NCCL_LINK=SHARED HOROVOD_NCCL_HOME=$EBROOTNCCL ' -# prebuildopts += ' HOROVOD_GPU_OPERATIONS=NCCL ' -# prebuildopts += ' HOROVOD_CPU_OPERATIONS=MPI ' -# prebuildopts += ' HOROVOD_GPU_ALLREDUCE=NCCL ' -# prebuildopts += ' HOROVOD_GPU_BROADCAST=NCCL ' -# prebuildopts += ' HOROVOD_WITH_MPI=1 ' - - -prebuildopts = 'export LDSHARED="$CC -shared" && ' -prebuildopts += ' HOROVOD_NCCL_LINK=SHARED HOROVOD_GPU_ALLREDUCE=NCCL HOROVOD_NCCL_HOME=$EBROOTNCCL ' -prebuildopts += ' HOROVOD_WITH_TENSORFLOW=1 HOROVOD_WITH_PYTORCH=1' -prebuildopts += ' NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - -preinstallopts = prebuildopts - - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('horovod', version, { - 'checksums': ['6ebc90d627af486d44335ed48489e1e8dc190607574758867c52e4e17d75a247'], - 'cuda_compute_capabilities': ['7.0', '7.5', '8.0'], - }), -] - -sanity_check_paths = { - 'files': ['bin/horovodrun'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -# This makes openmpi work. It's up to the sysadmins to correct me here. -modextravars = {'HOROVOD_MPI_THREADS_DISABLE': '1'} - -modloadmsg = 'Setting HOROVOD_MPI_THREADS_DISABLE=1. 
' - -moduleclass = 'tools' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-gomkl-2020-bigint.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-gomkl-2020-bigint.eb deleted file mode 100644 index e7733d80e52d8c76fa9feabee505bfe3e3ceb866..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-gomkl-2020-bigint.eb +++ /dev/null @@ -1,52 +0,0 @@ -name = "Hypre" -version = "2.20.0" -versionsuffix = "-bigint" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' -configopts += '--enable-bigint ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "mv %(installdir)s/examples/Makefile_gnu %(installdir)s/examples/Makefile", - "rm %(installdir)s/examples/Makefile*orig", - "rm %(installdir)s/examples/Makefile_*", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-gomkl-2020.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-gomkl-2020.eb deleted file mode 100644 index dc68e852c07b4705b8f38345b2291c507aab23c6..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-gomkl-2020.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = "Hypre" -version = "2.20.0" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "mv %(installdir)s/examples/Makefile_gnu %(installdir)s/examples/Makefile", - "rm %(installdir)s/examples/Makefile*orig", - "rm %(installdir)s/examples/Makefile_*", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-gpsmkl-2020-bigint.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-gpsmkl-2020-bigint.eb deleted file mode 100644 index db9788a1bd292eb62154fd7c360dbc46988d43bf..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-gpsmkl-2020-bigint.eb +++ /dev/null @@ -1,52 +0,0 @@ -name = "Hypre" -version = "2.20.0" -versionsuffix = "-bigint" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' -configopts += '--enable-bigint ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "mv %(installdir)s/examples/Makefile_gnu %(installdir)s/examples/Makefile", - "rm %(installdir)s/examples/Makefile*orig", - "rm %(installdir)s/examples/Makefile_*", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-gpsmkl-2020.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-gpsmkl-2020.eb deleted file mode 100644 index ed0604e97d338e34864871a51029407489519ba0..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-gpsmkl-2020.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = "Hypre" -version = "2.20.0" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "mv %(installdir)s/examples/Makefile_gnu %(installdir)s/examples/Makefile", - "rm %(installdir)s/examples/Makefile*orig", - "rm %(installdir)s/examples/Makefile_*", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-2020-bigint.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-2020-bigint.eb deleted file mode 100644 index 6ab8d251fb9f8c2127bcb2e66d49b864ba803eea..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-2020-bigint.eb +++ /dev/null @@ -1,51 +0,0 @@ -name = "Hypre" -version = "2.20.0" -versionsuffix = "-bigint" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' -configopts += '--enable-bigint ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "rm %(installdir)s/examples/Makefile_gnu*", - "rm %(installdir)s/examples/Makefile*orig", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-2020.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-2020.eb deleted file mode 100644 index c905ec8824057fd3980b3108dd74bbc54469abc7..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-2020.eb +++ /dev/null @@ -1,49 +0,0 @@ -name = "Hypre" -version = "2.20.0" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. 
-The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "rm %(installdir)s/examples/Makefile_gnu*", - "rm %(installdir)s/examples/Makefile*orig", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-para-2020-bigint.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-para-2020-bigint.eb deleted file mode 100644 index b00cb72f5837584282313dbabcf6c838d6d2399e..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-para-2020-bigint.eb +++ /dev/null @@ -1,51 +0,0 @@ -name = "Hypre" -version = "2.20.0" -versionsuffix = "-bigint" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' -configopts += '--enable-bigint ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "rm %(installdir)s/examples/Makefile_gnu*", - "rm %(installdir)s/examples/Makefile*orig", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-para-2020.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-para-2020.eb deleted file mode 100644 index 8ffce4e3d84aa9934e3766c774eb5c2ed09f2e54..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-intel-para-2020.eb +++ /dev/null @@ -1,49 +0,0 @@ -name = "Hypre" -version = "2.20.0" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "rm %(installdir)s/examples/Makefile_gnu*", - "rm %(installdir)s/examples/Makefile*orig", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-iomkl-2020-bigint.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-iomkl-2020-bigint.eb deleted file mode 100644 index d0e019068a0645d8a1e98f4233df3dd3d0f6260e..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-iomkl-2020-bigint.eb +++ /dev/null @@ -1,51 +0,0 @@ -name = "Hypre" -version = "2.20.0" -versionsuffix = "-bigint" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. 
-The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' -configopts += '--enable-bigint ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "rm %(installdir)s/examples/Makefile_gnu*", - "rm %(installdir)s/examples/Makefile*orig", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/Hypre-2.20.0-iomkl-2020.eb b/Golden_Repo/h/Hypre/Hypre-2.20.0-iomkl-2020.eb deleted file mode 100644 index fe1c07dd25542aeab164ca63006826276e8f2985..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/Hypre-2.20.0-iomkl-2020.eb +++ /dev/null @@ -1,49 +0,0 @@ -name = "Hypre" -version = "2.20.0" - -homepage = "https://computation.llnl.gov/casc/linear_solvers/sls_hypre.html" -description = """Hypre is a library for solving large, sparse linear -systems of equations on massively parallel computers. -The problems of interest arise in the simulation codes being developed -at LLNL and elsewhere to study physical phenomena in the defense, -environmental, energy, and biological sciences. -""" - -examples = """Examples can be found in $EBROOTHYPRE/examples.""" - -usage = """ -Hypre uses LAPACK, programs using Hypre can be linked with --L$HYPRE_LIB -lHYPRE -lm -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2020'} - -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ["https://github.com/hypre-space/hypre/archive/"] -sources = ['v%(version)s.tar.gz'] - -patches = ["hypre-%(version)s_examples_mkl.patch"] - -start_dir = 'src' - -configopts = '--with-openmp ' - -postinstallcmds = [ - "cp -r %(builddir)s/hypre-%(version)s/src/examples %(installdir)s/examples", - "rm %(installdir)s/examples/Makefile_gnu*", - "rm %(installdir)s/examples/Makefile*orig", - "cp %(builddir)s/hypre-%(version)s/src/HYPRE_config.h %(installdir)s/include", - "chmod 755 %(installdir)s/examples/vis", - "chmod 755 %(installdir)s/examples/docs", -] - -modextravars = { - 'HYPRE_ROOT': '%(installdir)s', - 'HYPRE_LIB': '%(installdir)s/lib', - 'HYPRE_INCLUDE': '%(installdir)s/include/' -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/h/Hypre/hypre-2.20.0_examples_mkl.patch b/Golden_Repo/h/Hypre/hypre-2.20.0_examples_mkl.patch deleted file mode 100644 index 5b91ea84cc01381e369363509d101bd6075fbd8e..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/Hypre/hypre-2.20.0_examples_mkl.patch +++ /dev/null @@ -1,510 +0,0 @@ ---- hypre-2.20.0/src/examples/Makefile 2020-09-24 19:56:42.000000000 +0200 -+++ hypre-2.20.0_ok/src/examples/Makefile 2020-10-16 14:50:46.117457000 +0200 -@@ -10,7 +10,8 @@ - F77 = mpif77 - CXX = mpicxx - F90 = mpifort --HYPRE_DIR = ../hypre -+HYPRE_DIR = $(EBROOTHYPRE) -+BLASLAPACKLIB = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread - - ######################################################################## - # Compiling and linking options -@@ -18,7 +19,7 @@ - COPTS = -g -Wall - CINCLUDES = -I$(HYPRE_DIR)/include - #CDEFS = -DHYPRE_EXVIS --CDEFS = -+CDEFS = -DHAVE_CONFIG_H -DHYPRE_TIMING - CFLAGS = $(COPTS) $(CINCLUDES) $(CDEFS) - FOPTS = -g - FINCLUDES = $(CINCLUDES) -@@ -33,7 +34,7 @@ - - - LINKOPTS = $(COPTS) --LIBS = -L$(HYPRE_DIR)/lib -lHYPRE -lm -+LIBS = -L$(HYPRE_DIR)/lib -lHYPRE $(BLASLAPACKLIB) -lm - LFLAGS = $(LINKOPTS) $(LIBS) -lstdc++ - LFLAGS_B =\ - -L${HYPRE_DIR}/lib\ ---- hypre-2.20.0/src/examples/Makefile_gnu 1970-01-01 01:00:00.000000000 +0100 -+++ hypre-2.20.0_ok/src/examples/Makefile_gnu 2020-10-16 14:51:47.427117000 +0200 -@@ -0,0 +1,237 @@ -+# Copyright 1998-2019 Lawrence Livermore National Security, LLC and other -+# HYPRE Project Developers. See the top-level COPYRIGHT file for details. -+# -+# SPDX-License-Identifier: (Apache-2.0 OR MIT) -+ -+######################################################################## -+# Compiler and external dependences -+######################################################################## -+CC = mpicc -+F77 = mpif77 -+CXX = mpicxx -+F90 = mpifort -+HYPRE_DIR = $(EBROOTHYPRE) -+BLASLAPACKLIB = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl -+ -+######################################################################## -+# Compiling and linking options -+######################################################################## -+COPTS = -g -Wall -+CINCLUDES = -I$(HYPRE_DIR)/include -+#CDEFS = -DHYPRE_EXVIS -+CDEFS = -DHAVE_CONFIG_H -DHYPRE_TIMING -+CFLAGS = $(COPTS) $(CINCLUDES) $(CDEFS) -+FOPTS = -g -+FINCLUDES = $(CINCLUDES) -+FFLAGS = $(FOPTS) $(FINCLUDES) -+CXXOPTS = $(COPTS) -Wno-deprecated -+CXXINCLUDES = $(CINCLUDES) -I.. 
-+CXXDEFS = $(CDEFS) -+IFLAGS_BXX = -+CXXFLAGS = $(CXXOPTS) $(CXXINCLUDES) $(CXXDEFS) $(IFLAGS_BXX) -+IF90FLAGS = -+F90FLAGS = $(FFLAGS) $(IF90FLAGS) -+ -+ -+LINKOPTS = $(COPTS) -+LIBS = -L$(HYPRE_DIR)/lib -lHYPRE $(BLASLAPACKLIB) -lm -+LFLAGS = $(LINKOPTS) $(LIBS) -lstdc++ -+LFLAGS_B =\ -+ -L${HYPRE_DIR}/lib\ -+ -lbHYPREClient-C\ -+ -lbHYPREClient-CX\ -+ -lbHYPREClient-F\ -+ -lbHYPRE\ -+ -lsidl -ldl -lxml2 -+LFLAGS77 = $(LFLAGS) -+LFLAGS90 = -+ -+######################################################################## -+# Rules for compiling the source files -+######################################################################## -+.SUFFIXES: .c .f .cxx .f90 -+ -+.c.o: -+ $(CC) $(CFLAGS) -c $< -+.f.o: -+ $(F77) $(FFLAGS) -c $< -+.cxx.o: -+ $(CXX) $(CXXFLAGS) -c $< -+ -+######################################################################## -+# List of all programs to be compiled -+######################################################################## -+ALLPROGS = ex1 ex2 ex3 ex4 ex5 ex5f ex6 ex7 ex8 ex9 ex11 ex12 ex12f \ -+ ex13 ex14 ex15 ex16 -+BIGINTPROGS = ex5big ex15big -+FORTRANPROGS = ex5f ex12f -+MAXDIMPROGS = ex17 ex18 -+COMPLEXPROGS = ex18comp -+ -+all: $(ALLPROGS) -+ -+default: all -+ -+bigint: $(BIGINTPROGS) -+ -+fortran: $(FORTRANPROGS) -+ -+maxdim: $(MAXDIMPROGS) -+ -+complex: $(COMPLEXPROGS) -+ -+######################################################################## -+# Example 1 -+######################################################################## -+ex1: ex1.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 2 -+######################################################################## -+ex2: ex2.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 3 -+######################################################################## -+ex3: ex3.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 4 -+######################################################################## -+ex4: ex4.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 5 -+######################################################################## -+ex5: ex5.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 5 with 64-bit integers -+######################################################################## -+ex5big: ex5big.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 5 Fortran 77 -+######################################################################## -+ex5f: ex5f.o -+ $(F77) -o $@ $^ $(LFLAGS77) -+ -+######################################################################## -+# Example 6 -+######################################################################## -+ex6: ex6.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 7 -+######################################################################## -+ex7: ex7.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 8 -+######################################################################## -+ex8: ex8.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 9 
-+######################################################################## -+ex9: ex9.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 10 -+######################################################################## -+ex10: ex10.o -+ $(CXX) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 11 -+######################################################################## -+ex11: ex11.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 12 -+######################################################################## -+ex12: ex12.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 12 Fortran 77 -+######################################################################## -+ex12f: ex12f.o -+ $(F77) -o $@ $^ $(LFLAGS77) -+ -+######################################################################## -+# Example 13 -+######################################################################## -+ex13: ex13.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 14 -+######################################################################## -+ex14: ex14.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 15 -+######################################################################## -+ex15: ex15.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 15 with 64-bit integers -+######################################################################## -+ex15big: ex15big.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 16 -+######################################################################## -+ex16: ex16.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 17 -+######################################################################## -+ex17: ex17.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 18 -+######################################################################## -+ex18: ex18.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 18 (complex) -+######################################################################## -+ex18comp: ex18comp.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Clean up -+######################################################################## -+clean: -+ rm -f $(ALLPROGS:=.o) -+ rm -f $(BIGINTPROGS:=.o) -+ rm -f $(FORTRANPROGS:=.o) -+ rm -f $(MAXDIMPROGS:=.o) -+ rm -f $(COMPLEXPROGS:=.o) -+ cd vis; make clean -+distclean: clean -+ rm -f $(ALLPROGS) $(ALLPROGS:=*~) -+ rm -f $(BIGINTPROGS) $(BIGINTPROGS:=*~) -+ rm -f $(FORTRANLPROGS) $(FORTRANPROGS:=*~) -+ rm -f $(MAXDIMPROGS) $(MAXDIMPROGS:=*~) -+ rm -f $(COMPLEXPROGS) $(COMPLEXPROGS:=*~) -+ rm -fr README* ---- hypre-2.20.0/src/examples/Makefile_gnu_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ hypre-2.20.0_ok/src/examples/Makefile_gnu_cuda 2020-10-16 14:53:05.941436000 +0200 -@@ -0,0 +1,237 @@ -+# Copyright 1998-2019 Lawrence Livermore National Security, LLC 
and other -+# HYPRE Project Developers. See the top-level COPYRIGHT file for details. -+# -+# SPDX-License-Identifier: (Apache-2.0 OR MIT) -+ -+######################################################################## -+# Compiler and external dependences -+######################################################################## -+CC = mpicc -+F77 = mpif77 -+CXX = mpicxx -+F90 = mpifort -+HYPRE_DIR = $(EBROOTHYPRE) -+BLASLAPACKLIB = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl -+ -+######################################################################## -+# Compiling and linking options -+######################################################################## -+COPTS = -g -Wall -+CINCLUDES = -I$(HYPRE_DIR)/include -+#CDEFS = -DHYPRE_EXVIS -+CDEFS = -DHAVE_CONFIG_H -DHYPRE_TIMING -+CFLAGS = $(COPTS) $(CINCLUDES) $(CDEFS) -+FOPTS = -g -+FINCLUDES = $(CINCLUDES) -+FFLAGS = $(FOPTS) $(FINCLUDES) -+CXXOPTS = $(COPTS) -Wno-deprecated -+CXXINCLUDES = $(CINCLUDES) -I.. -+CXXDEFS = $(CDEFS) -+IFLAGS_BXX = -+CXXFLAGS = $(CXXOPTS) $(CXXINCLUDES) $(CXXDEFS) $(IFLAGS_BXX) -+IF90FLAGS = -+F90FLAGS = $(FFLAGS) $(IF90FLAGS) -+ -+ -+LINKOPTS = $(COPTS) -+LIBS = -L$(HYPRE_DIR)/lib -lHYPRE $(BLASLAPACKLIB) -lcublas -lcudart -lm -+LFLAGS = $(LINKOPTS) $(LIBS) -lstdc++ -+LFLAGS_B =\ -+ -L${HYPRE_DIR}/lib\ -+ -lbHYPREClient-C\ -+ -lbHYPREClient-CX\ -+ -lbHYPREClient-F\ -+ -lbHYPRE\ -+ -lsidl -ldl -lxml2 -+LFLAGS77 = $(LFLAGS) -+LFLAGS90 = -+ -+######################################################################## -+# Rules for compiling the source files -+######################################################################## -+.SUFFIXES: .c .f .cxx .f90 -+ -+.c.o: -+ $(CC) $(CFLAGS) -c $< -+.f.o: -+ $(F77) $(FFLAGS) -c $< -+.cxx.o: -+ $(CXX) $(CXXFLAGS) -c $< -+ -+######################################################################## -+# List of all programs to be compiled -+######################################################################## -+ALLPROGS = ex1 ex2 ex3 ex4 ex5 ex5f ex6 ex7 ex8 ex9 ex11 ex12 ex12f \ -+ ex13 ex14 ex15 ex16 -+BIGINTPROGS = ex5big ex15big -+FORTRANPROGS = ex5f ex12f -+MAXDIMPROGS = ex17 ex18 -+COMPLEXPROGS = ex18comp -+ -+all: $(ALLPROGS) -+ -+default: all -+ -+bigint: $(BIGINTPROGS) -+ -+fortran: $(FORTRANPROGS) -+ -+maxdim: $(MAXDIMPROGS) -+ -+complex: $(COMPLEXPROGS) -+ -+######################################################################## -+# Example 1 -+######################################################################## -+ex1: ex1.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 2 -+######################################################################## -+ex2: ex2.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 3 -+######################################################################## -+ex3: ex3.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 4 -+######################################################################## -+ex4: ex4.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 5 -+######################################################################## -+ex5: ex5.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 5 with 64-bit integers 
-+######################################################################## -+ex5big: ex5big.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 5 Fortran 77 -+######################################################################## -+ex5f: ex5f.o -+ $(F77) -o $@ $^ $(LFLAGS77) -+ -+######################################################################## -+# Example 6 -+######################################################################## -+ex6: ex6.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 7 -+######################################################################## -+ex7: ex7.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 8 -+######################################################################## -+ex8: ex8.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 9 -+######################################################################## -+ex9: ex9.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 10 -+######################################################################## -+ex10: ex10.o -+ $(CXX) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 11 -+######################################################################## -+ex11: ex11.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 12 -+######################################################################## -+ex12: ex12.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 12 Fortran 77 -+######################################################################## -+ex12f: ex12f.o -+ $(F77) -o $@ $^ $(LFLAGS77) -+ -+######################################################################## -+# Example 13 -+######################################################################## -+ex13: ex13.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 14 -+######################################################################## -+ex14: ex14.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 15 -+######################################################################## -+ex15: ex15.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 15 with 64-bit integers -+######################################################################## -+ex15big: ex15big.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 16 -+######################################################################## -+ex16: ex16.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 17 -+######################################################################## -+ex17: ex17.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Example 18 -+######################################################################## -+ex18: ex18.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ 
-+######################################################################## -+# Example 18 (complex) -+######################################################################## -+ex18comp: ex18comp.o -+ $(CC) -o $@ $^ $(LFLAGS) -+ -+######################################################################## -+# Clean up -+######################################################################## -+clean: -+ rm -f $(ALLPROGS:=.o) -+ rm -f $(BIGINTPROGS:=.o) -+ rm -f $(FORTRANPROGS:=.o) -+ rm -f $(MAXDIMPROGS:=.o) -+ rm -f $(COMPLEXPROGS:=.o) -+ cd vis; make clean -+distclean: clean -+ rm -f $(ALLPROGS) $(ALLPROGS:=*~) -+ rm -f $(BIGINTPROGS) $(BIGINTPROGS:=*~) -+ rm -f $(FORTRANLPROGS) $(FORTRANPROGS:=*~) -+ rm -f $(MAXDIMPROGS) $(MAXDIMPROGS:=*~) -+ rm -f $(COMPLEXPROGS) $(COMPLEXPROGS:=*~) -+ rm -fr README* diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-GCCcore-10.3.0-serial-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-GCCcore-10.3.0-serial-Python-3.8.5.eb deleted file mode 100644 index 06e23dc39205c365ac5b476639951270e47730ed..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-GCCcore-10.3.0-serial-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-serial%s' % local_pysuffix - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d'] - -req_py_majver = 3 -req_py_minver = 0 - -builddependencies = [ - ('pkgconfig', '1.5.1', local_pysuffix), - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', local_pysuffix, ('gcccoremkl', '10.3.0-2021.2.0')), # numpy required - ('HDF5', '1.10.6', '-serial'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-GCCcore-9.3.0-serial-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-GCCcore-9.3.0-serial-Python-3.8.5.eb deleted file mode 100644 index b93c638ff5daa948fbf7b155b1e20d4f701a6ffe..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-GCCcore-9.3.0-serial-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-serial%s' % local_pysuffix - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d'] - -req_py_majver = 3 -req_py_minver = 0 - -builddependencies = [ - ('pkgconfig', '1.5.1', local_pysuffix), - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), # numpy required - ('HDF5', '1.10.6', '-serial'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-gompi-2020-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-gompi-2020-Python-3.8.5.eb deleted file mode 100644 index 2e3149123788e4bd13251ade5232603e1f45d4d5..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-gompi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -patches = ['h5py-2.10.0_avoid-mpi-init.patch'] - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-gompi-2021-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-gompi-2021-Python-3.8.5.eb deleted file mode 100644 index 0a8b8900e2dc4a827bc907d5667d664c238973d1..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-gompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = ['h5py-%(version)s_avoid-mpi-init.patch'] -checksums = [ - '84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d', # h5py-2.10.0.tar.gz - '6bacb71f5d9fbd7bd9a01018d7fe21b067a2317f33c4a7c21fde9cd404c1603f', # h5py-2.10.0_avoid-mpi-init.patch -] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index b73167a6e03dabc505794f2aceb5863c80e868d0..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 1b1f90220547ac59c9abb8d30fe239d125a38213..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = ['h5py-%(version)s_avoid-mpi-init.patch'] -checksums = [ - '84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d', # h5py-2.10.0.tar.gz - '6bacb71f5d9fbd7bd9a01018d7fe21b067a2317f33c4a7c21fde9cd404c1603f', # h5py-2.10.0_avoid-mpi-init.patch -] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-iimpi-2021-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-iimpi-2021-Python-3.8.5.eb deleted file mode 100644 index cf223726c0d441c6d68699cd4cae668d0d5ab42d..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-iimpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = ['h5py-%(version)s_avoid-mpi-init.patch'] -checksums = [ - '84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d', # h5py-2.10.0.tar.gz - '6bacb71f5d9fbd7bd9a01018d7fe21b067a2317f33c4a7c21fde9cd404c1603f', # h5py-2.10.0_avoid-mpi-init.patch -] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-iompi-2021-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-iompi-2021-Python-3.8.5.eb deleted file mode 100644 index 364ad7c4153c653759e7e9aeabb252025934fc78..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-iompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. 
HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = ['h5py-%(version)s_avoid-mpi-init.patch'] -checksums = [ - '84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d', # h5py-2.10.0.tar.gz - '6bacb71f5d9fbd7bd9a01018d7fe21b067a2317f33c4a7c21fde9cd404c1603f', # h5py-2.10.0_avoid-mpi-init.patch -] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2020-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index a0cbda5a64b144e80e68b04fed92fb4482f95ec8..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2020-mt-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2020-mt-Python-3.8.5.eb deleted file mode 100644 index 23f284c3ad78cbc851273f254db1f99c4ac239bc..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2020-mt-Python-3.8.5.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2021-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index fc7bbdcf87b0f13ef267008ba5f5da939daf6089..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-ipsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = ['h5py-%(version)s_avoid-mpi-init.patch'] -checksums = [ - '84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d', # h5py-2.10.0.tar.gz - '6bacb71f5d9fbd7bd9a01018d7fe21b067a2317f33c4a7c21fde9cd404c1603f', # h5py-2.10.0_avoid-mpi-init.patch -] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2020-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2020-Python-3.8.5.eb deleted file mode 100644 index 1551c6cd355815bf7721e2969061595348479c1a..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2020-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && export CFLAGS="-noswitcherror -D_NPY_NO_DEPRECATIONS" && ' -prebuildopts += 'python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2020.1-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2020.1-Python-3.8.5.eb deleted file mode 100644 index 2dc5e5d720ddfac948c3e00a11e83fe1ee5f7530..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2020.1-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && export CFLAGS="-noswitcherror -D_NPY_NO_DEPRECATIONS" && ' -prebuildopts += 'python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2021-Python-3.8.5.eb b/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2021-Python-3.8.5.eb deleted file mode 100644 index 8a2cde3b3f1c12f5616f95a44214370a44424cdb..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0-npsmpic-2021-Python-3.8.5.eb +++ /dev/null @@ -1,49 +0,0 @@ -easyblock = "PythonPackage" - -name = 'h5py' -version = '2.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.h5py.org/' -description = """HDF5 for Python (h5py) is a general-purpose Python interface to the Hierarchical Data Format library, - version 5. HDF5 is a versatile, mature scientific software library designed for the fast, flexible storage of enormous - amounts of data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -patches = ['h5py-%(version)s_avoid-mpi-init.patch'] -checksums = [ - '84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d', # h5py-2.10.0.tar.gz - '6bacb71f5d9fbd7bd9a01018d7fe21b067a2317f33c4a7c21fde9cd404c1603f', # h5py-2.10.0_avoid-mpi-init.patch -] - -req_py_majver = 3 -req_py_minver = 0 - -# to really use mpi enabled hdf5 we now seem to need a configure step -prebuildopts = 'export LDSHARED="$CC -shared" && export CFLAGS="-noswitcherror -D_NPY_NO_DEPRECATIONS" && ' -prebuildopts += 'python setup.py configure --mpi --hdf5=$EBROOTHDF5 && ' - -builddependencies = [ - ('pkgconfig', '1.5.1', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), # numpy required - ('mpi4py', '3.0.3', versionsuffix), # required for MPI support - ('HDF5', '1.10.6'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/h/h5py/h5py-2.10.0_avoid-mpi-init.patch b/Golden_Repo/h/h5py/h5py-2.10.0_avoid-mpi-init.patch deleted file mode 100644 index 98034776400e528d4baebce3c01bd9dca5bfc4a9..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/h5py/h5py-2.10.0_avoid-mpi-init.patch +++ /dev/null @@ -1,85 +0,0 @@ -avoid that 'import h5py' triggers MPI_Init via mpi4py - -backported to h5py 2.10.0 from https://github.com/h5py/h5py/pull/1552 by Kenneth Hoste (HPC-UGent) - -diff -ru h5py-2.10.0.orig/h5py/api_types_ext.pxd h5py-2.10.0/h5py/api_types_ext.pxd ---- h5py-2.10.0.orig/h5py/api_types_ext.pxd 2019-09-06 23:29:33.000000000 +0200 -+++ h5py-2.10.0/h5py/api_types_ext.pxd 2020-05-25 19:30:30.000000000 +0200 -@@ -12,7 +12,7 @@ - include 'config.pxi' - - IF MPI: -- from mpi4py.MPI cimport MPI_Comm, MPI_Info, Comm, Info -+ from mpi4py.libmpi cimport MPI_Comm, MPI_Info - - cdef extern from "stdlib.h": - ctypedef long size_t -@@ -52,6 +52,7 @@ - cdef extern from "Python.h": - ctypedef void PyObject - ctypedef ssize_t Py_ssize_t -+ ctypedef size_t Py_uintptr_t - - PyObject* PyErr_Occurred() - void PyErr_SetString(object type, char *message) -diff -ru h5py-2.10.0.orig/h5py/h5p.pyx h5py-2.10.0/h5py/h5p.pyx ---- h5py-2.10.0.orig/h5py/h5p.pyx 2019-09-06 23:29:33.000000000 +0200 -+++ h5py-2.10.0/h5py/h5p.pyx 2020-05-25 19:33:24.000000000 +0200 -@@ -17,6 +17,7 @@ - from cpython.buffer cimport PyObject_CheckBuffer, \ - PyObject_GetBuffer, PyBuffer_Release, \ - PyBUF_SIMPLE -+from cpython.long cimport PyLong_AsVoidPtr - - from utils cimport require_tuple, convert_dims, convert_tuple, \ - emalloc, efree, \ -@@ -1161,7 +1162,7 @@ - - IF MPI: - @with_phil -- def set_fapl_mpio(self, Comm comm not None, Info info not None): -+ def set_fapl_mpio(self, comm, info): - """ (Comm comm, Info info) - - Set MPI-I/O Parallel HDF5 driver. 
-@@ -1169,7 +1170,12 @@ - Comm: An mpi4py.MPI.Comm instance - Info: An mpi4py.MPI.Info instance - """ -- H5Pset_fapl_mpio(self.id, comm.ob_mpi, info.ob_mpi) -+ from mpi4py.MPI import Comm, Info, _handleof -+ assert isinstance(comm, Comm) -+ assert isinstance(info, Info) -+ cdef Py_uintptr_t _comm = _handleof(comm) -+ cdef Py_uintptr_t _info = _handleof(info) -+ H5Pset_fapl_mpio(self.id, <MPI_Comm>_comm, <MPI_Info>_info) - - - @with_phil -@@ -1183,20 +1189,22 @@ - """ - cdef MPI_Comm comm - cdef MPI_Info info -+ from mpi4py.MPI import Comm, Info, _addressof - - H5Pget_fapl_mpio(self.id, &comm, &info) - pycomm = Comm() -- pyinfo = Info() -- MPI_Comm_dup(comm, &pycomm.ob_mpi) -- MPI_Info_dup(info, &pyinfo.ob_mpi) -+ MPI_Comm_dup(comm, <MPI_Comm *>PyLong_AsVoidPtr(_addressof(pycomm))) - MPI_Comm_free(&comm) -+ -+ pyinfo = Info() -+ MPI_Info_dup(info, <MPI_Info *>PyLong_AsVoidPtr(_addressof(pyinfo))) - MPI_Info_free(&info) - - return (pycomm, pyinfo) - - - @with_phil -- def set_fapl_mpiposix(self, Comm comm not None, bint use_gpfs_hints=0): -+ def set_fapl_mpiposix(self, comm, bint use_gpfs_hints=0): - """ Obsolete. - """ - raise RuntimeError("MPI-POSIX driver is broken; removed in h5py 2.3.1") diff --git a/Golden_Repo/h/help2man/help2man-1.48.3-GCCcore-10.3.0.eb b/Golden_Repo/h/help2man/help2man-1.48.3-GCCcore-10.3.0.eb deleted file mode 100644 index 5d11c90818b0b2c1ee0701f7e88c00ddec0004d5..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/help2man/help2man-1.48.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'help2man' -version = '1.48.3' - -homepage = 'https://www.gnu.org/software/help2man/' -description = """help2man produces simple manual pages from the '--help' and '--version' output of other commands.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_XZ] -checksums = ['8361ff3c643fbd391064e97e5f54592ca28b880eaffbf566a68e0ad800d1a8ac'] - -builddependencies = [ - # use same binutils version that was used when building GCC toolchain - ('binutils', '2.36.1', '', True), -] - -sanity_check_paths = { - 'files': ['bin/help2man'], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/h/hwloc/hwloc-2.2.0-GCCcore-9.3.0.eb b/Golden_Repo/h/hwloc/hwloc-2.2.0-GCCcore-9.3.0.eb deleted file mode 100644 index 7707e0c61d7803dc70094dbe7d3aefc833752f99..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/hwloc/hwloc-2.2.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'hwloc' -version = '2.2.0' - -homepage = 'http://www.open-mpi.org/projects/hwloc/' - -description = """ - The Portable Hardware Locality (hwloc) software package provides a portable - abstraction (across OS, versions, architectures, ...) of the hierarchical - topology of modern architectures, including NUMA memory nodes, sockets, shared - caches, cores and simultaneous multithreading. It also gathers various system - attributes such as cache and memory information as well as the locality of I/O - devices such as network interfaces, InfiniBand HCAs or GPUs. It primarily - aims at helping applications with gathering information about modern computing - hardware so as to exploit it accordingly and efficiently. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://www.open-mpi.org/software/hwloc/v%(version_major_minor)s/downloads/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('numactl', '2.0.13', '', SYSTEM), - ('libxml2', '2.9.10'), - ('libpciaccess', '0.16'), - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = "--enable-libnuma=$EBROOTNUMACTL --enable-cuda --enable-nvml --enable-opencl" -configopts += "--disable-cairo --disable-gl --disable-libudev " - -sanity_check_paths = { - 'files': ['bin/lstopo', 'include/hwloc/linux.h', - 'lib/libhwloc.%s' % SHLIB_EXT], - 'dirs': ['share/man/man3'], -} -sanity_check_commands = ['lstopo'] - -modluafooter = ''' -add_property("arch","gpu") -''' -moduleclass = 'system' diff --git a/Golden_Repo/h/hwloc/hwloc-2.4.1-GCCcore-10.3.0.eb b/Golden_Repo/h/hwloc/hwloc-2.4.1-GCCcore-10.3.0.eb deleted file mode 100644 index f28cae2246670551937810e55f4c8fcd3a685086..0000000000000000000000000000000000000000 --- a/Golden_Repo/h/hwloc/hwloc-2.4.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,55 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'hwloc' -version = '2.4.1' - -homepage = 'https://www.open-mpi.org/projects/hwloc/' - -description = """ - The Portable Hardware Locality (hwloc) software package provides a portable - abstraction (across OS, versions, architectures, ...) of the hierarchical - topology of modern architectures, including NUMA memory nodes, sockets, shared - caches, cores and simultaneous multithreading. It also gathers various system - attributes such as cache and memory information as well as the locality of I/O - devices such as network interfaces, InfiniBand HCAs or GPUs. It primarily - aims at helping applications with gathering information about modern computing - hardware so as to exploit it accordingly and efficiently. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -# need to build with -fno-tree-vectorize to avoid segfaulting lstopo on Intel Skylake -# cfr. 
https://github.com/open-mpi/hwloc/issues/315 -toolchainopts = {'vectorize': False} - -source_urls = [ - 'https://www.open-mpi.org/software/hwloc/v%(version_major_minor)s/downloads/'] -sources = [SOURCE_TAR_GZ] -checksums = ['4267fe1193a8989f3ab7563a7499e047e77e33fed8f4dec16822a7aebcf78459'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('numactl', '2.0.14', '', SYSTEM), - ('libxml2', '2.9.10'), - ('libpciaccess', '0.16'), - ('CUDA', '11.3', '', SYSTEM), -] - -configopts = "--enable-libnuma=$EBROOTNUMACTL --enable-cuda --enable-nvml --enable-opencl " -configopts += "--disable-cairo --disable-gl --disable-libudev " - -sanity_check_paths = { - 'files': ['bin/lstopo', 'include/hwloc/linux.h', - 'lib/libhwloc.%s' % SHLIB_EXT], - 'dirs': ['share/man/man3'], -} -sanity_check_commands = ['lstopo'] - -modluafooter = ''' -add_property("arch","gpu") -''' -moduleclass = 'system' diff --git a/Golden_Repo/hidden_deps.txt b/Golden_Repo/hidden_deps.txt index 66c022e0715d9389f0ac016c1e214aceb45bcf2d..f7b386ed87fadc05e11ebe077c8d7bc720741dbe 100644 --- a/Golden_Repo/hidden_deps.txt +++ b/Golden_Repo/hidden_deps.txt @@ -1,105 +1,168 @@ -adwaita-icon-theme -ant ACTC ANTLR APR APR-util -assimp -ATK AT-SPI2-ATK AT-SPI2-core +ATK Autoconf Automake -babl -binutils Bison -byacc -bzip2 -cairo -cling -configurable-http-proxy +CUSP Coreutils CubeWriter -CUSP -damageproto DB DBus -dbus-glib DocBook-XML Dyninst ETSF_IO -eudev Exiv2 -expat FFmpeg -fixesproto -flatbuffers FLTK -fontconfig -fontsproto -FoX -freeglut -freetype FTGL -g2clib -g2lib -gc +FoX GCCcore -Gdk-Pixbuf GEGL -gettext -gexiv2 -gflags -Ghostscript -giflib GL2PS GLEW -GLib GLM -glog GLPK -glproto +GLib GObject-Introspection -googletest GPC -gperf -GraphicsMagick -gsettings-desktop-schemas GTI GTK+ -GtkSourceView GTS -guile -gzip +Gdk-Pixbuf +Ghostscript +GraphicsMagick +GtkSourceView HarfBuzz -icc ICU -ifort -inputproto -intltool -itstool -JasPer -jhbuild -jsc-xdg-menu JSON-C JSON-GLib -JsonCpp JUnit +JasPer +JsonCpp JupyterKernel-Bash JupyterKernel-Cling JupyterKernel-JavaScript JupyterKernel-Julia JupyterKernel-Octave +JupyterKernel-PyDeepLearning JupyterKernel-PyParaView JupyterKernel-PyQuantum -JupyterKernel-PyDeepLearning JupyterKernel-R JupyterKernel-Ruby JupyterProxy-Matlab JupyterProxy-XpraHTML5 -kbproto LAME +LMDB +LZO LevelDB +LibTIFF +LibUUID +Libint +LittleCMS +M4 +MATIO +NASM +NLopt +OPARI2 +OTF2 +OpenImageDenoise +PCRE +PDT +PROJ +Pango +Pmw +PnMPI +PyCairo +PyGObject +Python-Xpra +Qhull +Qt +Qt5 +S-Lang +SCons +SIP +SQLite +SWIG +Serf +SoX +Szip +Tk +UDUNITS +XKeyboardConfig +XML-Parser +XZ +Yasm +Zip +Zip +adwaita-icon-theme +ant +assimp +babl +binutils +byacc +bzip2 +cairo +cling +configurable-http-proxy +damageproto +dbus-glib +eudev +expat +fixesproto +flatbuffers +fontconfig +fontsproto +freeglut +freetype +g2clib +g2lib +gc +gettext +gexiv2 +gflags +giflib +glog +glproto +googletest +gperf +gsettings-desktop-schemas +guile +gzip +icc +ifort +inputproto +intltool +itstool +jhbuild +jsc-xdg-menu +kbproto +libGLU +libICE +libSM +libX11 +libXau +libXaw +libXcursor +libXdamage +libXdmcp +libXext +libXfixes +libXfont +libXft +libXi +libXinerama +libXmu +libXp +libXpm +libXrandr +libXrender +libXt +libXtst libcerf libcroco libctl @@ -114,11 +177,8 @@ libfontenc libgd libgeotiff libglade -libGLU -libICE libiconv libidn -Libint libjpeg-turbo libmatheval libmypaint @@ -127,109 +187,55 @@ libpng libpthread-stubs libreadline librsvg -libSM libsndfile libspatialindex -LibTIFF libtirpc libtool libunistring libunwind 
-LibUUID libwebp -libX11 -libXau -libXaw libxcb -libXcursor -libXdamage -libXdmcp -libXext -libXfixes -libXfont -libXft -libXi -libXinerama libxkbcommon libxml2 libxml2-python -libXmu -libXp -libXpm -libXrandr -libXrender libxslt -libXt -libXtst libyaml libyuv -LittleCMS -LMDB lz4 -LZO -M4 makedepend -MATIO motif msgpack-c muparserx -NASM ncurses nettle -nsync nlohmann-json -NLopt nodejs nsync +nsync nvenc_sdk nvidia nvidia-driver -OPARI2 -OpenImageDenoise openvkl -OTF2 -Pango patchelf -PCRE -PDT pixman pkg-config pkgconfig -Pmw -PnMPI popt printproto -PROJ protobuf protobuf-python pscom pybind11 -PyCairo -PyGObject -Python-Xpra -Qhull qrupdate -Qt -Qt5 randrproto recordproto rencode renderproto rkcommon -SCons scrollkeeper -Serf -SIP -SoX -S-Lang snappy spdlog -SQLite -SWIG -Szip texinfo -Tk -UDUNITS uglifyjs util-linux vpx @@ -246,15 +252,9 @@ xcb-util-renderutil xcb-util-wm xextproto xineramaproto -XKeyboardConfig -XML-Parser xorg-macros xprop xproto xtrans -XZ -Yasm yuicompressor -Zip zlib -Zip diff --git a/Golden_Repo/i/ICA-AROMA/ICA-AROMA-0.4.4-beta-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/i/ICA-AROMA/ICA-AROMA-0.4.4-beta-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 0a46fca4fca4c9234a1abe603bd42b78e6a3f6af..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ICA-AROMA/ICA-AROMA-0.4.4-beta-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = "PackedBinary" - -name = 'ICA-AROMA' -version = '0.4.4-beta' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/rhr-pruim/ICA-AROMA' -description = """ICA-based Automatic Removal Of Motion Artifacts identifies and removes motion-related independent -components from fMRI data -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/rhr-pruim/ICA-AROMA/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [('Python', '3.8.5')] - -modextrapaths = {'PYTHONPATH': ['']} - -sanity_check_paths = { - 'files': ['ICA_AROMA.py', 'ICA_AROMA_functions.py'], - 'dirs': [''], -} - -moduleclass = 'data' diff --git a/Golden_Repo/i/ICA-AROMA/ICA-AROMA-0.4.4-beta-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/i/ICA-AROMA/ICA-AROMA-0.4.4-beta-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index d4799ebd60984b480350d655702a5a8a8f3a1976..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ICA-AROMA/ICA-AROMA-0.4.4-beta-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = "PackedBinary" - -name = 'ICA-AROMA' -version = '0.4.4-beta' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/rhr-pruim/ICA-AROMA' -description = """ICA-based Automatic Removal Of Motion Artifacts identifies and removes motion-related independent -components from fMRI data -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/rhr-pruim/ICA-AROMA/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [('Python', '3.8.5')] - -modextrapaths = {'PYTHONPATH': ['']} - -sanity_check_paths = { - 'files': ['ICA_AROMA.py', 'ICA_AROMA_functions.py'], - 'dirs': [''], -} - -moduleclass = 'data' diff --git a/Golden_Repo/i/ICU/ICU-67.1-GCCcore-10.3.0.eb b/Golden_Repo/i/ICU/ICU-67.1-GCCcore-10.3.0.eb deleted file mode 100644 index a3da738b853d26b46442b8b499a8c1e714c758eb..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ICU/ICU-67.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock 
= 'ConfigureMake' - -name = 'ICU' -version = '67.1' - -homepage = 'https://site.icu-project.org/home' -description = """ICU is a mature, widely used set of C/C++ and Java libraries providing Unicode and Globalization -support for software applications.""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = ['https://github.com/unicode-org/icu/releases/download/release-%(version_major)s-%(version_minor)s'] -sources = ['icu4c-%(version_major)s_%(version_minor)s-src.tgz'] - -builddependencies = [('binutils', '2.36.1')] - -start_dir = 'source' - -sanity_check_paths = { - 'files': ['lib/libicu%s.%s' % (x, SHLIB_EXT) for x in ['data', 'i18n', 'io', 'test', 'tu', 'uc']], - 'dirs': ['bin', 'include/unicode', 'share/icu', 'share/man'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/i/ICU/ICU-67.1-GCCcore-9.3.0.eb b/Golden_Repo/i/ICU/ICU-67.1-GCCcore-9.3.0.eb deleted file mode 100644 index 4cda421a46e7340fdeda8a474ad8098e8673eaa6..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ICU/ICU-67.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ICU' -version = '67.1' - -homepage = 'https://site.icu-project.org/home' -description = """ICU is a mature, widely used set of C/C++ and Java libraries providing Unicode and Globalization -support for software applications.""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = ['https://github.com/unicode-org/icu/releases/download/release-%(version_major)s-%(version_minor)s'] -sources = ['icu4c-%(version_major)s_%(version_minor)s-src.tgz'] - -builddependencies = [('binutils', '2.34')] - -start_dir = 'source' - -sanity_check_paths = { - 'files': ['lib/libicu%s.%s' % (x, SHLIB_EXT) for x in ['data', 'i18n', 'io', 'test', 'tu', 'uc']], - 'dirs': ['bin', 'include/unicode', 'share/icu', 'share/man'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/i/ITK/ITK-5.1.2-GCCcore-9.3.0-nompi-Python-3.8.5.eb b/Golden_Repo/i/ITK/ITK-5.1.2-GCCcore-9.3.0-nompi-Python-3.8.5.eb deleted file mode 100644 index 87c9cb20686820c9147a754183dd376a839732e3..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ITK/ITK-5.1.2-GCCcore-9.3.0-nompi-Python-3.8.5.eb +++ /dev/null @@ -1,93 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ITK' -version = '5.1.2' -versionsuffix = '-nompi-Python-%(pyver)s' - -homepage = 'https://itk.org' -description = """Insight Segmentation and Registration Toolkit (ITK) provides - an extensive suite of software tools for registering and segmenting - multidimensional imaging data.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/InsightSoftwareConsortium/ITK/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['651284ce6f68e8bd31db176a6b53f5679209a8ed5b6b5480c3591d66c6e10b60'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Bison', '3.6.4'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6', '-serial'), - ('SWIG', '3.0.12', '-Python-%(pyver)s'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('expat', '2.2.9'), - ('Eigen', '3.3.7'), - # ('ParaView', '5.8.1', '-Python-%(pyver)s'), - ('tbb', '2020.3'), - ('Qt5', '5.14.2'), - ('OpenGL', '2020'), - ('X11', '20200222'), -] - -separate_build_dir = 
True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " -configopts += "-DBUILD_TESTING=OFF " -# configopts += "-DITK_FORBID_DOWNLOADS=ON " - -configopts += "-DITKV4_COMPATIBILITY=ON " -configopts += "-DITK_LEGACY_SILENT=ON " - -configopts += "-DITK_USE_SYSTEM_SWIG=ON " -configopts += "-DSWIG_EXECUTABLE=${EBROOTSWIG}/bin/swig " -configopts += "-DSWIG_DIR=${EBROOTSWIG} " - -configopts += "-DITK_USE_SYSTEM_EIGEN=ON " -configopts += "-DEigen3_DIR=$EBROOTEIGEN/share/eigen3/cmake " - -configopts += "-DITK_USE_SYSTEM_HDF5=ON " -configopts += "-DHDF5_DIR=$EBROOTHDF5 " - -configopts += "-DITK_WRAP_PYTHON=ON " -configopts += "-DModule_ITKReview=ON " - -# fails with ITK_WRAP_PYTON=ON, error, because vtkImage*.h cannot be found -# read: https://discourse.itk.org/t/problem-with-building-itk-with-module-itkvtkglue-on/2315 -# code: https://github.com/InsightSoftwareConsortium/ITKVtkGlue/blob/itk-5.0.1/CMakeLists.txt#L81 -# configopts="-DModule_ITKVtkGlue=ON ${configopts} " -# configopts="-DVTK_DIR=$EBROOTPARAVIEW/lib64/cmake/paraview-5.8/vtk ${configopts} " -# configopts="-DVTK_INCLUDE_DIRS=$EBROOTPARAVIEW/include/paraview-5.8 ${configopts} " -# configopts="-DVTK_LIBRARIES=$EBROOTPARAVIEW/lib64 ${configopts} " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/itkTestDriver', - 'lib/libITKVTK-%(version_major_minor)s.so', - 'lib/libITKIOJPEG-%(version_major_minor)s.so', - 'lib/libITKCommon-%(version_major_minor)s.so'], - 'dirs': ['include/ITK-%(version_major_minor)s', - 'lib/python%(pyshortver)s/site-packages', - 'share'], -} - -sanity_check_commands = [('python', "-c 'import %(namelower)s'")] - -moduleclass = 'vis' diff --git a/Golden_Repo/i/ITK/ITK-5.1.2-gompi-2020-Python-3.8.5.eb b/Golden_Repo/i/ITK/ITK-5.1.2-gompi-2020-Python-3.8.5.eb deleted file mode 100644 index 2bfa842e22d52eab5f9d404bcf933bb82fff1011..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ITK/ITK-5.1.2-gompi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,93 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ITK' -version = '5.1.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://itk.org' -description = """Insight Segmentation and Registration Toolkit (ITK) provides - an extensive suite of software tools for registering and segmenting - multidimensional imaging data.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': False} - -source_urls = ['https://github.com/InsightSoftwareConsortium/ITK/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['651284ce6f68e8bd31db176a6b53f5679209a8ed5b6b5480c3591d66c6e10b60'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Bison', '3.6.4'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6', '-serial'), - ('SWIG', '3.0.12', '-Python-%(pyver)s'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('expat', '2.2.9'), - ('Eigen', '3.3.7'), - # ('ParaView', '5.8.1', '-Python-%(pyver)s'), - ('tbb', '2020.3'), - ('Qt5', '5.14.2'), - ('OpenGL', '2020'), - ('X11', '20200222'), -] - -separate_build_dir = True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " 
-configopts += "-DBUILD_TESTING=OFF " -# configopts += "-DITK_FORBID_DOWNLOADS=ON " - -configopts += "-DITKV4_COMPATIBILITY=ON " -configopts += "-DITK_LEGACY_SILENT=ON " - -configopts += "-DITK_USE_SYSTEM_SWIG=ON " -configopts += "-DSWIG_EXECUTABLE=${EBROOTSWIG}/bin/swig " -configopts += "-DSWIG_DIR=${EBROOTSWIG} " - -configopts += "-DITK_USE_SYSTEM_EIGEN=ON " -configopts += "-DEigen3_DIR=$EBROOTEIGEN/share/eigen3/cmake " - -configopts += "-DITK_USE_SYSTEM_HDF5=ON " -configopts += "-DHDF5_DIR=$EBROOTHDF5 " - -configopts += "-DITK_WRAP_PYTHON=ON " -configopts += "-DModule_ITKReview=ON " - -# fails with ITK_WRAP_PYTON=ON, error, because vtkImage*.h cannot be found -# read: https://discourse.itk.org/t/problem-with-building-itk-with-module-itkvtkglue-on/2315 -# code: https://github.com/InsightSoftwareConsortium/ITKVtkGlue/blob/itk-5.0.1/CMakeLists.txt#L81 -# configopts="-DModule_ITKVtkGlue=ON ${configopts} " -# configopts="-DVTK_DIR=$EBROOTPARAVIEW/lib64/cmake/paraview-5.8/vtk ${configopts} " -# configopts="-DVTK_INCLUDE_DIRS=$EBROOTPARAVIEW/include/paraview-5.8 ${configopts} " -# configopts="-DVTK_LIBRARIES=$EBROOTPARAVIEW/lib64 ${configopts} " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/itkTestDriver', - 'lib/libITKVTK-%(version_major_minor)s.so', - 'lib/libITKIOJPEG-%(version_major_minor)s.so', - 'lib/libITKCommon-%(version_major_minor)s.so'], - 'dirs': ['include/ITK-%(version_major_minor)s', - 'lib/python%(pyshortver)s/site-packages', - 'share'], -} - -sanity_check_commands = [('python', "-c 'import %(namelower)s'")] - -moduleclass = 'vis' diff --git a/Golden_Repo/i/ITK/ITK-5.1.2-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/i/ITK/ITK-5.1.2-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index 75f56451296a006c4f4390b526d77e7e187d2f31..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ITK/ITK-5.1.2-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,93 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ITK' -version = '5.1.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://itk.org' -description = """Insight Segmentation and Registration Toolkit (ITK) provides - an extensive suite of software tools for registering and segmenting - multidimensional imaging data.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': False} - -source_urls = ['https://github.com/InsightSoftwareConsortium/ITK/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['651284ce6f68e8bd31db176a6b53f5679209a8ed5b6b5480c3591d66c6e10b60'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Bison', '3.6.4'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6', '-serial'), - ('SWIG', '3.0.12', '-Python-%(pyver)s'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('expat', '2.2.9'), - ('Eigen', '3.3.7'), - # ('ParaView', '5.8.1', '-Python-%(pyver)s'), - ('tbb', '2020.3'), - ('Qt5', '5.14.2'), - ('OpenGL', '2020'), - ('X11', '20200222'), -] - -separate_build_dir = True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " -configopts += "-DBUILD_TESTING=OFF " -# configopts += "-DITK_FORBID_DOWNLOADS=ON " - -configopts += "-DITKV4_COMPATIBILITY=ON " -configopts += 
"-DITK_LEGACY_SILENT=ON " - -configopts += "-DITK_USE_SYSTEM_SWIG=ON " -configopts += "-DSWIG_EXECUTABLE=${EBROOTSWIG}/bin/swig " -configopts += "-DSWIG_DIR=${EBROOTSWIG} " - -configopts += "-DITK_USE_SYSTEM_EIGEN=ON " -configopts += "-DEigen3_DIR=$EBROOTEIGEN/share/eigen3/cmake " - -configopts += "-DITK_USE_SYSTEM_HDF5=ON " -configopts += "-DHDF5_DIR=$EBROOTHDF5 " - -configopts += "-DITK_WRAP_PYTHON=ON " -configopts += "-DModule_ITKReview=ON " - -# fails with ITK_WRAP_PYTON=ON, error, because vtkImage*.h cannot be found -# read: https://discourse.itk.org/t/problem-with-building-itk-with-module-itkvtkglue-on/2315 -# code: https://github.com/InsightSoftwareConsortium/ITKVtkGlue/blob/itk-5.0.1/CMakeLists.txt#L81 -# configopts="-DModule_ITKVtkGlue=ON ${configopts} " -# configopts="-DVTK_DIR=$EBROOTPARAVIEW/lib64/cmake/paraview-5.8/vtk ${configopts} " -# configopts="-DVTK_INCLUDE_DIRS=$EBROOTPARAVIEW/include/paraview-5.8 ${configopts} " -# configopts="-DVTK_LIBRARIES=$EBROOTPARAVIEW/lib64 ${configopts} " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/itkTestDriver', - 'lib/libITKVTK-%(version_major_minor)s.so', - 'lib/libITKIOJPEG-%(version_major_minor)s.so', - 'lib/libITKCommon-%(version_major_minor)s.so'], - 'dirs': ['include/ITK-%(version_major_minor)s', - 'lib/python%(pyshortver)s/site-packages', - 'share'], -} - -sanity_check_commands = [('python', "-c 'import %(namelower)s'")] - -moduleclass = 'vis' diff --git a/Golden_Repo/i/ITK/ITK-5.2.0-GCCcore-10.3.0-nompi-Python-3.8.5.eb b/Golden_Repo/i/ITK/ITK-5.2.0-GCCcore-10.3.0-nompi-Python-3.8.5.eb deleted file mode 100644 index fef7c0016b4eb14aad754e64c160224cbe8a7ba1..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ITK/ITK-5.2.0-GCCcore-10.3.0-nompi-Python-3.8.5.eb +++ /dev/null @@ -1,94 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ITK' -version = '5.2.0' -versionsuffix = '-nompi-Python-%(pyver)s' - -homepage = 'https://itk.org' -description = """Insight Segmentation and Registration Toolkit (ITK) provides - an extensive suite of software tools for registering and segmenting - multidimensional imaging data.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/InsightSoftwareConsortium/ITK/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e53961cd78df8bcfaf8bd8b813ae2cafdde984c7650a2ddf7dcf808df463ea74'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('Bison', '3.7.6'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6', '-serial'), - ('SWIG', '4.0.2', '-Python-%(pyver)s'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('expat', '2.2.9'), - ('Eigen', '3.3.7'), - # ('ParaView', '5.8.1', '-Python-%(pyver)s'), - ('tbb', '2020.3'), - ('Qt5', '5.14.2'), - ('OpenGL', '2020'), - ('X11', '20200222'), - ('SciPy-Stack', '2021', '-Python-%(pyver)s', ('gcccoremkl', '10.3.0-2021.2.0')), -] - -separate_build_dir = True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " -configopts += "-DBUILD_TESTING=OFF " -# configopts += "-DITK_FORBID_DOWNLOADS=ON " - -configopts += "-DITKV4_COMPATIBILITY=ON " -configopts += "-DITK_LEGACY_SILENT=ON " - 
-configopts += "-DITK_USE_SYSTEM_SWIG=ON " -configopts += "-DSWIG_EXECUTABLE=${EBROOTSWIG}/bin/swig " -configopts += "-DSWIG_DIR=${EBROOTSWIG} " - -configopts += "-DITK_USE_SYSTEM_EIGEN=ON " -configopts += "-DEigen3_DIR=$EBROOTEIGEN/share/eigen3/cmake " - -configopts += "-DITK_USE_SYSTEM_HDF5=ON " -configopts += "-DHDF5_DIR=$EBROOTHDF5 " - -configopts += "-DITK_WRAP_PYTHON=ON " -configopts += "-DModule_ITKReview=ON " - -# fails with ITK_WRAP_PYTON=ON, error, because vtkImage*.h cannot be found -# read: https://discourse.itk.org/t/problem-with-building-itk-with-module-itkvtkglue-on/2315 -# code: https://github.com/InsightSoftwareConsortium/ITKVtkGlue/blob/itk-5.0.1/CMakeLists.txt#L81 -# configopts="-DModule_ITKVtkGlue=ON ${configopts} " -# configopts="-DVTK_DIR=$EBROOTPARAVIEW/lib64/cmake/paraview-5.8/vtk ${configopts} " -# configopts="-DVTK_INCLUDE_DIRS=$EBROOTPARAVIEW/include/paraview-5.8 ${configopts} " -# configopts="-DVTK_LIBRARIES=$EBROOTPARAVIEW/lib64 ${configopts} " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/itkTestDriver', - 'lib/libITKVTK-%(version_major_minor)s.so', - 'lib/libITKIOJPEG-%(version_major_minor)s.so', - 'lib/libITKCommon-%(version_major_minor)s.so'], - 'dirs': ['include/ITK-%(version_major_minor)s', - 'lib/python%(pyshortver)s/site-packages', - 'share'], -} - -sanity_check_commands = [('python', "-c 'import %(namelower)s'")] - -moduleclass = 'vis' diff --git a/Golden_Repo/i/ITK/ITK-5.2.0-gompi-2021-Python-3.8.5.eb b/Golden_Repo/i/ITK/ITK-5.2.0-gompi-2021-Python-3.8.5.eb deleted file mode 100644 index 5e4719911c5e2ddbddcbd410e0135d67b34c9095..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ITK/ITK-5.2.0-gompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,94 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ITK' -version = '5.2.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://itk.org' -description = """Insight Segmentation and Registration Toolkit (ITK) provides - an extensive suite of software tools for registering and segmenting - multidimensional imaging data.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': False} - -source_urls = ['https://github.com/InsightSoftwareConsortium/ITK/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e53961cd78df8bcfaf8bd8b813ae2cafdde984c7650a2ddf7dcf808df463ea74'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('Bison', '3.7.6'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6', '-serial'), - ('SWIG', '4.0.2', '-Python-%(pyver)s'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('expat', '2.2.9'), - ('Eigen', '3.3.7'), - # ('ParaView', '5.8.1', '-Python-%(pyver)s'), - ('tbb', '2020.3'), - ('Qt5', '5.14.2'), - ('OpenGL', '2020'), - ('X11', '20200222'), - ('SciPy-Stack', '2021', '-Python-%(pyver)s', ('gcccoremkl', '10.3.0-2021.2.0')), -] - -separate_build_dir = True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " -configopts += "-DBUILD_TESTING=OFF " -# configopts += "-DITK_FORBID_DOWNLOADS=ON " - -configopts += "-DITKV4_COMPATIBILITY=ON " -configopts += "-DITK_LEGACY_SILENT=ON " - -configopts += "-DITK_USE_SYSTEM_SWIG=ON " 
-configopts += "-DSWIG_EXECUTABLE=${EBROOTSWIG}/bin/swig " -configopts += "-DSWIG_DIR=${EBROOTSWIG} " - -configopts += "-DITK_USE_SYSTEM_EIGEN=ON " -configopts += "-DEigen3_DIR=$EBROOTEIGEN/share/eigen3/cmake " - -configopts += "-DITK_USE_SYSTEM_HDF5=ON " -configopts += "-DHDF5_DIR=$EBROOTHDF5 " - -configopts += "-DITK_WRAP_PYTHON=ON " -configopts += "-DModule_ITKReview=ON " - -# fails with ITK_WRAP_PYTON=ON, error, because vtkImage*.h cannot be found -# read: https://discourse.itk.org/t/problem-with-building-itk-with-module-itkvtkglue-on/2315 -# code: https://github.com/InsightSoftwareConsortium/ITKVtkGlue/blob/itk-5.0.1/CMakeLists.txt#L81 -# configopts="-DModule_ITKVtkGlue=ON ${configopts} " -# configopts="-DVTK_DIR=$EBROOTPARAVIEW/lib64/cmake/paraview-5.8/vtk ${configopts} " -# configopts="-DVTK_INCLUDE_DIRS=$EBROOTPARAVIEW/include/paraview-5.8 ${configopts} " -# configopts="-DVTK_LIBRARIES=$EBROOTPARAVIEW/lib64 ${configopts} " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/itkTestDriver', - 'lib/libITKVTK-%(version_major_minor)s.so', - 'lib/libITKIOJPEG-%(version_major_minor)s.so', - 'lib/libITKCommon-%(version_major_minor)s.so'], - 'dirs': ['include/ITK-%(version_major_minor)s', - 'lib/python%(pyshortver)s/site-packages', - 'share'], -} - -sanity_check_commands = [('python', "-c 'import %(namelower)s'")] - -moduleclass = 'vis' diff --git a/Golden_Repo/i/ITK/ITK-5.2.0-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/i/ITK/ITK-5.2.0-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 0b78c8739dd958a3a83aae0c7bd4e3ceb3699d8e..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ITK/ITK-5.2.0-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,94 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ITK' -version = '5.2.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://itk.org' -description = """Insight Segmentation and Registration Toolkit (ITK) provides - an extensive suite of software tools for registering and segmenting - multidimensional imaging data.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': False} - -source_urls = ['https://github.com/InsightSoftwareConsortium/ITK/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e53961cd78df8bcfaf8bd8b813ae2cafdde984c7650a2ddf7dcf808df463ea74'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('Bison', '3.7.6'), - ('pkg-config', '0.29.2'), - ('Perl', '5.32.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6', '-serial'), - ('SWIG', '4.0.2', '-Python-%(pyver)s'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('expat', '2.2.9'), - ('Eigen', '3.3.7'), - # ('ParaView', '5.8.1', '-Python-%(pyver)s'), - ('tbb', '2020.3'), - ('Qt5', '5.14.2'), - ('OpenGL', '2020'), - ('X11', '20200222'), - ('SciPy-Stack', '2021', '-Python-%(pyver)s', ('gcccoremkl', '10.3.0-2021.2.0')), -] - -separate_build_dir = True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " -configopts += "-DBUILD_TESTING=OFF " -# configopts += "-DITK_FORBID_DOWNLOADS=ON " - -configopts += "-DITKV4_COMPATIBILITY=ON " -configopts += "-DITK_LEGACY_SILENT=ON " - -configopts += "-DITK_USE_SYSTEM_SWIG=ON " -configopts += 
"-DSWIG_EXECUTABLE=${EBROOTSWIG}/bin/swig " -configopts += "-DSWIG_DIR=${EBROOTSWIG} " - -configopts += "-DITK_USE_SYSTEM_EIGEN=ON " -configopts += "-DEigen3_DIR=$EBROOTEIGEN/share/eigen3/cmake " - -configopts += "-DITK_USE_SYSTEM_HDF5=ON " -configopts += "-DHDF5_DIR=$EBROOTHDF5 " - -configopts += "-DITK_WRAP_PYTHON=ON " -configopts += "-DModule_ITKReview=ON " - -# fails with ITK_WRAP_PYTON=ON, error, because vtkImage*.h cannot be found -# read: https://discourse.itk.org/t/problem-with-building-itk-with-module-itkvtkglue-on/2315 -# code: https://github.com/InsightSoftwareConsortium/ITKVtkGlue/blob/itk-5.0.1/CMakeLists.txt#L81 -# configopts="-DModule_ITKVtkGlue=ON ${configopts} " -# configopts="-DVTK_DIR=$EBROOTPARAVIEW/lib64/cmake/paraview-5.8/vtk ${configopts} " -# configopts="-DVTK_INCLUDE_DIRS=$EBROOTPARAVIEW/include/paraview-5.8 ${configopts} " -# configopts="-DVTK_LIBRARIES=$EBROOTPARAVIEW/lib64 ${configopts} " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/itkTestDriver', - 'lib/libITKVTK-%(version_major_minor)s.so', - 'lib/libITKIOJPEG-%(version_major_minor)s.so', - 'lib/libITKCommon-%(version_major_minor)s.so'], - 'dirs': ['include/ITK-%(version_major_minor)s', - 'lib/python%(pyshortver)s/site-packages', - 'share'], -} - -sanity_check_commands = [('python', "-c 'import %(namelower)s'")] - -moduleclass = 'vis' diff --git a/Golden_Repo/i/ImageMagick/ImageMagick-7.0.10-25-GCCcore-10.3.0.eb b/Golden_Repo/i/ImageMagick/ImageMagick-7.0.10-25-GCCcore-10.3.0.eb deleted file mode 100644 index d88d1e1fbac59427e1476527c649da8143281381..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ImageMagick/ImageMagick-7.0.10-25-GCCcore-10.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# Author: Ravi Tripathi -# Email: ravi89@uab.edu - -easyblock = 'ConfigureMake' - -name = 'ImageMagick' -version = '7.0.10-25' - -homepage = 'http://www.imagemagick.org/' -description = """ImageMagick is a software suite to create, edit, compose, or convert bitmap images""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/ImageMagick/ImageMagick/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['b6b058a70e9a64eb54672fdc6e984ddb9f62530151509d290b223904132913ba'] - -dependencies = [ - ('bzip2', '1.0.8'), - ('X11', '20200222'), - ('Ghostscript', '9.52'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), - ('LibTIFF', '4.1.0'), - ('LittleCMS', '2.11'), - ('OpenEXR', '2.5.2'), -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -configopts = "--with-gslib --with-x" - -sanity_check_paths = { - 'files': [], - 'dirs': ['bin', 'etc/%(name)s-%(version_major)s', 'include/%(name)s-%(version_major)s', 'lib', 'share'], -} - -modextravars = {'MAGICK_HOME': '%(installdir)s'} - -moduleclass = 'vis' diff --git a/Golden_Repo/i/ImageMagick/ImageMagick-7.0.10-25-GCCcore-9.3.0.eb b/Golden_Repo/i/ImageMagick/ImageMagick-7.0.10-25-GCCcore-9.3.0.eb deleted file mode 100644 index c1f3578f25534f3e81dd6c86daf8bd00c0c3ad32..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ImageMagick/ImageMagick-7.0.10-25-GCCcore-9.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# 
Author: Ravi Tripathi -# Email: ravi89@uab.edu - -easyblock = 'ConfigureMake' - -name = 'ImageMagick' -version = '7.0.10-25' - -homepage = 'http://www.imagemagick.org/' -description = """ImageMagick is a software suite to create, edit, compose, or convert bitmap images""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/ImageMagick/ImageMagick/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['b6b058a70e9a64eb54672fdc6e984ddb9f62530151509d290b223904132913ba'] - -dependencies = [ - ('bzip2', '1.0.8'), - ('X11', '20200222'), - ('Ghostscript', '9.52'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), - ('LibTIFF', '4.1.0'), - ('LittleCMS', '2.11'), - ('OpenEXR', '2.5.2'), -] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -configopts = "--with-gslib --with-x" - -sanity_check_paths = { - 'files': [], - 'dirs': ['bin', 'etc/%(name)s-%(version_major)s', 'include/%(name)s-%(version_major)s', 'lib', 'share'], -} - -modextravars = {'MAGICK_HOME': '%(installdir)s'} - -moduleclass = 'vis' diff --git a/Golden_Repo/i/Inspector/Inspector-2020_update3.eb b/Golden_Repo/i/Inspector/Inspector-2020_update3.eb deleted file mode 100644 index 2011394973c775bc05f42ec08a6b9324b307b231..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/Inspector/Inspector-2020_update3.eb +++ /dev/null @@ -1,18 +0,0 @@ -name = 'Inspector' -version = '2020_update3' - -homepage = 'http://software.intel.com/en-us/intel-inspector-xe' -description = """Intel Inspector 2017 is an easy to use memory error checker and thread checker for serial and parallel -applications """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['inspector_%(version)s.tar.gz'] - -dontcreateinstalldir = True - -requires_runtime_license = False - -moduleclass = 'tools' diff --git a/Golden_Repo/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 81c2f7af68773a20d02d22243651a4165148c216..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,53 +0,0 @@ -name = 'iccifort' -version = '2020.2.254' -versionsuffix = '-GCC-9.3.0' - -homepage = 'https://software.intel.com/en-us/intel-compilers/' -description = "Intel C, C++ & Fortran compilers" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16744/'] -sources = ['parallel_studio_xe_%(version_major)s_update%(version_minor)s_cluster_edition.tgz'] -patches = ['iccifort-%(version)s_no_mpi_rt_dependency.patch'] -checksums = [ - # parallel_studio_xe_2020_update2_composer_edition.tgz - '4795c44374e8988b91da20ac8f13022d7d773461def4a26ca210a8694f69f133', - # iccifort-2020.2.254_no_mpi_rt_dependency.patch - '73e582d9e108d0680c19c14e9a9c49dbbb06829e39ba8ed87bfd6b4222231196', -] - -local_gccver = '9.3.0' - -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), -] - -# list of regex for components to install -# full list of components can be obtained from pset/mediaconfig.xml in unpacked sources -# cfr. 
https://software.intel.com/en-us/articles/intel-composer-xe-2015-silent-installation-guide -components = [ - 'intel-comp', 'intel-ccomp', 'intel-fcomp', 'intel-icc', 'intel-ifort', - 'intel-openmp', 'intel-ipsc?_', 'intel-gdb(?!.*mic)' -] - -dontcreateinstalldir = True - -# disable data collection -modextravars = { - 'INTEL_DISABLE_ISIP': '1' -} - -# We have a custom naming scheme that allows us to use a more descriptive module name -modaltsoftname = 'Intel' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = 'family("compiler")' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -moduleclass = 'compiler' diff --git a/Golden_Repo/i/iccifort/iccifort-2020.2.254_no_mpi_rt_dependency.patch b/Golden_Repo/i/iccifort/iccifort-2020.2.254_no_mpi_rt_dependency.patch deleted file mode 100644 index 996914a65e25a5c4d0271057ce090a7d24111c9e..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iccifort/iccifort-2020.2.254_no_mpi_rt_dependency.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ruN parallel_studio_xe_2020_update2_cluster_edition.orig/pset/mediaconfig.xml parallel_studio_xe_2020_update2_cluster_edition/pset/mediaconfig.xml ---- parallel_studio_xe_2020_update2_cluster_edition.orig/pset/mediaconfig.xml 2020-06-29 11:38:53.482655692 +0200 -+++ parallel_studio_xe_2020_update2_cluster_edition/pset/mediaconfig.xml 2020-10-08 10:49:17.075991073 +0200 -@@ -1737,7 +1737,7 @@ - </Layers> - <InstallDir>${COMPLIB_ROOT}</InstallDir> - <GUID>1593423482368</GUID> -- <Component depend="53.3;6.1;6.3;6.91;6.12;6.7;6.24;153.2" id="1" invisible="0" mandatory="0" platform="INTEL64"> -+ <Component depend="53.3;6.1;6.3;6.91;6.12;6.7;6.24" id="1" invisible="0" mandatory="0" platform="INTEL64"> - <Name>Intel Fortran Compiler for Intel(R) 64</Name> - <Description>Intel Fortran Compiler 19.1 Update 2</Description> - <Name locale="ja_JP.UTF-8">インテル(R) Fortran コンパイラー (インテル(R) 64)</Name> diff --git a/Golden_Repo/i/iimpi/iimpi-2020.eb b/Golden_Repo/i/iimpi/iimpi-2020.eb deleted file mode 100644 index 23eb0117abefd17f0c6105b37cd91126385aa4b3..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iimpi/iimpi-2020.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = "Toolchain" - -name = 'iimpi' -version = '2020' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = 'Intel C/C++ and Fortran compilers, alongside Intel MPI.' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compver = '2020.2.254' -local_gccsuffix = '-GCC-9.3.0' -dependencies = [ - ('iccifort', '%s%s' % (local_compver, local_gccsuffix)), - ('impi', '2019.8.254', '', ('iccifort', '%s%s' % (local_compver, local_gccsuffix))), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/iimpi/iimpi-2021.eb b/Golden_Repo/i/iimpi/iimpi-2021.eb deleted file mode 100644 index 63adcbca408ee2ed8154e81d314824a1f8599099..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iimpi/iimpi-2021.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = "Toolchain" - -name = 'iimpi' -version = '2021' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = 'Intel C/C++ and Fortran compilers, alongside Intel MPI.' 
- -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compver = '2021.2.0' -local_gccsuffix = '-GCC-10.3.0' -dependencies = [ - ('intel-compilers', '%s%s' % (local_compver, local_gccsuffix)), - ('impi', '2021.2.0', '', ('intel-compilers', '%s%s' % (local_compver, local_gccsuffix))), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-gompi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-gompi-2020.eb deleted file mode 100644 index 488cb72e2639315b45d8341d18f5fbfb151f67c8..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-gompi-2020.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020', 'name': 'gompi'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-gpsmpi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-gpsmpi-2020.eb deleted file mode 100644 index 54730029b2210b3c89905d0cf5ae124334057da9..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-gpsmpi-2020.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020', 'name': 'gpsmpi'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-iimpi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-iimpi-2020.eb deleted file mode 100644 index 9f95db8af011e58d8bb49f56dbb45668a77d342a..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-iimpi-2020.eb +++ /dev/null @@ -1,33 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020', 'name': 'iimpi'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-iompi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-iompi-2020.eb deleted file mode 100644 index 80d318529f81d7ed525d8694dbffb0aabb262d49..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-iompi-2020.eb +++ /dev/null @@ -1,32 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, and more. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020', 'name': 'iompi'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-ipsmpi-2020-mt.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-ipsmpi-2020-mt.eb deleted file mode 100644 index 75b8b7fb90173380d059facfa1e88d68864f0289..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,32 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, and more. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020-mt', 'name': 'ipsmpi'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-ipsmpi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-ipsmpi-2020.eb deleted file mode 100644 index 49f0b79135e64c38c013f291f5b86e264aff841a..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-ipsmpi-2020.eb +++ /dev/null @@ -1,32 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, and more. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020', 'name': 'ipsmpi'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-npsmpic-2020.1.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-npsmpic-2020.1.eb deleted file mode 100644 index 436919b25dcc471ddeb1016af4444c1cda28313f..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-npsmpic-2020.1.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, and more. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -# NVHPC is not yet supported -interfaces = False - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254-npsmpic-2020.eb b/Golden_Repo/i/imkl/imkl-2020.2.254-npsmpic-2020.eb deleted file mode 100644 index 6019f9b07c7b979bc37bf6ae56819c45ac6a4a67..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254-npsmpic-2020.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, and more. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020', 'name': 'npsmpic'} - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -# NVHPC is not yet supported -interfaces = False - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.2.254.eb b/Golden_Repo/i/imkl/imkl-2020.2.254.eb deleted file mode 100644 index 225dcd057df9273c2a83ec7f7d3981edb7dee97c..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.2.254.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'imkl' -version = '2020.2.254' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['l_mkl_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -interfaces = False - -hidden = True - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-gompi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-gompi-2020.eb deleted file mode 100644 index 54e39a9c93b8755734bded259cc79ffc28eafdd9..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-gompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020', 'name': 'gompi'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-gpsmpi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-gpsmpi-2020.eb deleted file mode 100644 index d33b31af46923b93b0f3a6bc6b182e08147b3720..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020', 'name': 'gpsmpi'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-iimpi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-iimpi-2020.eb deleted file mode 100644 index 6a4b54d862b5e9d5340e327c2019830504d53872..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-iimpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020', 'name': 'iimpi'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-iompi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-iompi-2020.eb deleted file mode 100644 index b7148182c2e2edc3586cf7a9cbc76322e9ad2e7a..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-iompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020', 'name': 'iompi'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-ipsmpi-2020-mt.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-ipsmpi-2020-mt.eb deleted file mode 100644 index 82c771760578e488bebecd3713f209fe6af721e2..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020-mt', 'name': 'ipsmpi'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-ipsmpi-2020.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-ipsmpi-2020.eb deleted file mode 100644 index f7ae5371ae270da4f96d254380c5e090dc10411c..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-ipsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020', 'name': 'ipsmpi'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-npsmpic-2020.1.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-npsmpic-2020.1.eb deleted file mode 100644 index 215f3767c06c1146ff9ec7e1002f7dff4dde1542..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-npsmpic-2020.1.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020.1', 'name': 'npsmpic'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -# NVHPC is not yet supported -interfaces = False - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304-npsmpic-2020.eb b/Golden_Repo/i/imkl/imkl-2020.4.304-npsmpic-2020.eb deleted file mode 100644 index cc7891fed962a10ad38e24751133e940df6ff3f4..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304-npsmpic-2020.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2020', 'name': 'npsmpic'} - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -# NVHPC is not yet supported -interfaces = False - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2020.4.304.eb b/Golden_Repo/i/imkl/imkl-2020.4.304.eb deleted file mode 100644 index 4b61f6ab347958996403e13cb34cbf2dd4ae57d4..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2020.4.304.eb +++ /dev/null @@ -1,40 +0,0 @@ -name = 'imkl' -version = '2020.4.304' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = [ - 'https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16917/'] -sources = ['l_mkl_%(version)s.tgz'] -checksums = ['2314d46536974dbd08f2a4e4f9e9a155dc7e79e2798c74e7ddfaad00a5917ea5'] - -dontcreateinstalldir = 'True' - -interfaces = False - -hidden = True - -postinstallcmds = [ - # extract the examples - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_cluster_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_c.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_core_f.tgz -C %(installdir)s/mkl/examples/', - 'tar xvzf %(installdir)s/mkl/examples/examples_f95.tgz -C %(installdir)s/mkl/examples/', -] - -modextravars = { - 'MKL_EXAMPLES': '%(installdir)s/mkl/examples/', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2021.2.0-gompi-2021.eb b/Golden_Repo/i/imkl/imkl-2021.2.0-gompi-2021.eb deleted file mode 100644 index 9d5ee7e8fba50fd8053be4b81e8cf817cd9ce7d8..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2021.2.0-gompi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'imkl' -version = '2021.2.0' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2021', 'name': 'gompi'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/17757/'] -sources = ['l_onemkl_p_%(version)s.296_offline.sh'] -checksums = ['816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae'] - -dontcreateinstalldir = 'True' - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2021.2.0-gpsmpi-2021.eb b/Golden_Repo/i/imkl/imkl-2021.2.0-gpsmpi-2021.eb deleted file mode 100644 index 4ca8de8d8061ca6a046d35d6f057d1cccac08b33..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2021.2.0-gpsmpi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'imkl' -version = '2021.2.0' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2021', 'name': 'gpsmpi'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/17757/'] -sources = ['l_onemkl_p_%(version)s.296_offline.sh'] -checksums = ['816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae'] - -dontcreateinstalldir = 'True' - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2021.2.0-iimpi-2021.eb b/Golden_Repo/i/imkl/imkl-2021.2.0-iimpi-2021.eb deleted file mode 100644 index 4922c0fb1d36bd3e4093064c514697e289d3a321..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2021.2.0-iimpi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'imkl' -version = '2021.2.0' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2021', 'name': 'iimpi'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/17757/'] -sources = ['l_onemkl_p_%(version)s.296_offline.sh'] -checksums = ['816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae'] - -dontcreateinstalldir = 'True' - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2021.2.0-iompi-2021.eb b/Golden_Repo/i/imkl/imkl-2021.2.0-iompi-2021.eb deleted file mode 100644 index a10c59fc382518cf0da43b0274a838216d412fb2..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2021.2.0-iompi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'imkl' -version = '2021.2.0' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2021', 'name': 'iompi'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/17757/'] -sources = ['l_onemkl_p_%(version)s.296_offline.sh'] -checksums = ['816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae'] - -dontcreateinstalldir = 'True' - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2021.2.0-ipsmpi-2021.eb b/Golden_Repo/i/imkl/imkl-2021.2.0-ipsmpi-2021.eb deleted file mode 100644 index 420f7dc6f47b504a0987ca416e4236b7e97eaf32..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2021.2.0-ipsmpi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -name = 'imkl' -version = '2021.2.0' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2021', 'name': 'ipsmpi'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/17757/'] -sources = ['l_onemkl_p_%(version)s.296_offline.sh'] -checksums = ['816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae'] - -dontcreateinstalldir = 'True' - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2021.2.0-npsmpic-2021.eb b/Golden_Repo/i/imkl/imkl-2021.2.0-npsmpic-2021.eb deleted file mode 100644 index f9b401b1eb3ee871103cf6cc32b8114063aab968..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2021.2.0-npsmpic-2021.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'imkl' -version = '2021.2.0' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'version': '2021', 'name': 'npsmpic'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/17757/'] -sources = ['l_onemkl_p_%(version)s.296_offline.sh'] -checksums = ['816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae'] - -dontcreateinstalldir = 'True' - -# NVHPC is not yet supported -interfaces = False - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/imkl/imkl-2021.2.0.eb b/Golden_Repo/i/imkl/imkl-2021.2.0.eb deleted file mode 100644 index 8a212bfef8327b7e8b7beacb0f554972c2341c74..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/imkl/imkl-2021.2.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'imkl' -version = '2021.2.0' - -homepage = 'http://software.intel.com/en-us/intel-mkl/' -description = """Intel Math Kernel Library is a library of highly optimized, - extensively threaded math routines for science, engineering, and financial - applications that require maximum performance. Core math functions include - BLAS, LAPACK, ScaLAPACK, Sparse Solvers, Fast Fourier Transforms, Vector Math, - and more. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = SYSTEM - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/17757/'] -sources = ['l_onemkl_p_%(version)s.296_offline.sh'] -checksums = ['816e9df26ff331d6c0751b86ed5f7d243f9f172e76f14e83b32bf4d1d619dbae'] - -dontcreateinstalldir = 'True' - -interfaces = False - -hidden = True - -moduleclass = 'numlib' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2019-UCX-UD.eb b/Golden_Repo/i/impi-settings/impi-settings-2019-UCX-UD.eb deleted file mode 100644 index 2f352fde562f6fa3ca191ce02b0a33b65c98506c..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2019-UCX-UD.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2019' -versionsuffix = 'UCX-UD' - -homepage = '' -description = """This is a module to load the IntelMPI configuration with UCX and UD as TL""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'ud_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2019-large-job-hybrid.eb b/Golden_Repo/i/impi-settings/impi-settings-2019-large-job-hybrid.eb deleted file mode 100644 index c8565ebfdec4ef393da63b0571aeb4569504f31a..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2019-large-job-hybrid.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2019' -versionsuffix = 'large-job-hybrid' - -homepage = '' -description = """This is a module to load the IntelMPI configuration for large scale hybrid jobs""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'dc_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. 
For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2019-large-job-mpi.eb b/Golden_Repo/i/impi-settings/impi-settings-2019-large-job-mpi.eb deleted file mode 100644 index 5b8ad67bbb926ba3279b356fab4befcad25efe9f..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2019-large-job-mpi.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2019' -versionsuffix = 'large-job-mpi' - -homepage = '' -description = """This is a module to load the IntelMPI configuration for large scale MPI jobs""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'dc_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2019-plain.eb b/Golden_Repo/i/impi-settings/impi-settings-2019-plain.eb deleted file mode 100644 index f491d95a605c32b6f27c1601f545cb129fc82eb8..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2019-plain.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2019' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default IntelMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'dc_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2021-UCX-UD.eb b/Golden_Repo/i/impi-settings/impi-settings-2021-UCX-UD.eb deleted file mode 100644 index 0f68cff1ff5c9a1d82481e6bbc3a33857c7fc71b..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2021-UCX-UD.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2021' -versionsuffix = 'UCX-UD' - -homepage = '' -description = """This is a module to load the IntelMPI configuration with UCX and UD as TL""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'ud_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. 
For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2021-large-job-hybrid.eb b/Golden_Repo/i/impi-settings/impi-settings-2021-large-job-hybrid.eb deleted file mode 100644 index 4c5b2ed2ad4dbef0a1c80e2f40087ea1ac4e538b..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2021-large-job-hybrid.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2021' -versionsuffix = 'large-job-hybrid' - -homepage = '' -description = """This is a module to load the IntelMPI configuration for large scale hybrid jobs""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'dc_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2021-large-job-mpi.eb b/Golden_Repo/i/impi-settings/impi-settings-2021-large-job-mpi.eb deleted file mode 100644 index dbc4f7a2cb6f61772c19f7afab08bf418b042f6d..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2021-large-job-mpi.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2021' -versionsuffix = 'large-job-mpi' - -homepage = '' -description = """This is a module to load the IntelMPI configuration for large scale MPI jobs""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'dc_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi-settings/impi-settings-2021-plain.eb b/Golden_Repo/i/impi-settings/impi-settings-2021-plain.eb deleted file mode 100644 index ebe18a75df90895f5ed21873b0abe1dc71879fb8..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi-settings/impi-settings-2021-plain.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2021' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default IntelMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'UCX_TLS': 'dc_x,sm,self', - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/impi/impi-2019.8.254-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/i/impi/impi-2019.8.254-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 08d985e754a36e21a7cab79748ed35da89325b2e..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi/impi-2019.8.254-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -name = 'impi' -version = '2019.8.254' - -homepage = 'http://software.intel.com/en-us/intel-mpi-library/' -description = """The Intel(R) MPI Library for Linux* OS is a multi-fabric message - passing library based on ANL MPICH2 and OSU MVAPICH2. 
The Intel MPI Library for - Linux OS implements the Message Passing Interface, version 2 (MPI-2) specification. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020.2.254-GCC-9.3.0', 'name': 'iccifort'} - -sources = ['l_mpi_%(version)s.tgz'] - -dependencies = [ - ('UCX', '1.8.1', '', SYSTEM) -] - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'IntelMPI' -# Add a family for our naming scheme -modluafooter = 'family("mpi")' -dontcreateinstalldir = 'True' -# set up all the mpi commands to work as expected -set_mpi_wrappers_all = 'True' - -postinstallcmds = [ - "ln -s %(installdir)s/intel64/lib/release/libmpi.so %(installdir)s/intel64/lib/", - "ln -s %(installdir)s/intel64/lib %(installdir)s", - "ln -s %(installdir)s/intel64/include %(installdir)s", - "rm -Rf %(installdir)s/bin", - "ln -s %(installdir)s/intel64/bin %(installdir)s", -] - -modextrapaths = { - 'CLASSPATH': 'intel64/lib/mpi.jar', -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/i/impi/impi-2021.2.0-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/i/impi/impi-2021.2.0-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 24291d510bc70e507094f67c5b4c5cb2d2569843..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/impi/impi-2021.2.0-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,20 +0,0 @@ -name = 'impi' -version = '2021.2.0' - -homepage = 'https://software.intel.com/content/www/us/en/develop/tools/mpi-library.html' -description = "Intel MPI Library, compatible with MPICH ABI" - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/17729/'] -sources = ['l_mpi_oneapi_p_%(version)s.215_offline.sh'] -checksums = ['d0d4cdd11edaff2e7285e38f537defccff38e37a3067c02f4af43a3629ad4aa3'] - -dependencies = [('UCX', '1.10.1', '', SYSTEM)] - -dontcreateinstalldir = 'True' -# set up all the mpi commands to work as expected -set_mpi_wrappers_all = 'True' - -moduleclass = 'mpi' diff --git a/Golden_Repo/i/intel-compilers/intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/i/intel-compilers/intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 87b136dccdcde63e696e8bc9aebedd843371f6f5..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/intel-compilers/intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -name = 'intel-compilers' -version = '2021.2.0' -local_gccver = '10.3.0' -versionsuffix = f'-GCC-{local_gccver}' - -homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/hpc-toolkit.html' -description = "Intel C, C++ & Fortran compilers (classic and oneAPI)" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -sources = [ - { - 'source_urls': ['https://registrationcenter-download.intel.com/akdlm/irc_nas/17749/'], - 'filename': 'l_dpcpp-cpp-compiler_p_%(version)s.118_offline.sh', - }, - { - 'source_urls': ['https://registrationcenter-download.intel.com/akdlm/irc_nas/17756/'], - 'filename': 'l_fortran-compiler_p_%(version)s.136_offline.sh', - }, -] -checksums = [ - # l_dpcpp-cpp-compiler_p_2021.2.0.118_offline.sh - 
    '5d01cbff1a574c3775510cd97ffddd27fdf56d06a6b0c89a826fb23da4336d59',
-    'a62e04a80f6d2f05e67cd5acb03fa58857ee22c6bd581ec0651c0ccd5bdec5a1',  # l_fortran-compiler_p_2021.2.0.136_offline.sh
-]
-
-dependencies = [
-    ('GCCcore', local_gccver),
-    ('binutils', '2.36.1', '', ('GCCcore', local_gccver)),
-]
-moduleclass = 'compiler'
diff --git a/Golden_Repo/i/intel-para/intel-para-2020.eb b/Golden_Repo/i/intel-para/intel-para-2020.eb
deleted file mode 100644
index caa95b34f536060b902ac777819a3d6578ec8f64..0000000000000000000000000000000000000000
--- a/Golden_Repo/i/intel-para/intel-para-2020.eb
+++ /dev/null
@@ -1,25 +0,0 @@
-easyblock = 'Toolchain'
-
-name = 'intel-para'
-version = '2020'
-versionsuffix = ''
-
-local_intelversion = '2020.2.254'
-local_intelsuffix = '-GCC-9.3.0'
-
-homepage = ''
-description = """intel-para provides Intel C/C++ and Fortran compilers, ParaStationMPI & Intel MKL.
-"""
-
-site_contacts = 'sc@fz-juelich.de'
-
-toolchain = SYSTEM
-
-dependencies = [
-    ('iccifort', local_intelversion, local_intelsuffix),
-    ('psmpi', '5.4.7-1', versionsuffix, ('iccifort', local_intelversion + local_intelsuffix)),
-    ('imkl', local_intelversion, versionsuffix, ('ipsmpi', version)),
-]
-
-
-moduleclass = 'toolchain'
diff --git a/Golden_Repo/i/intel-para/intel-para-2021.eb b/Golden_Repo/i/intel-para/intel-para-2021.eb
deleted file mode 100644
index 371743ce1975268225c8668bd07a089db8df8973..0000000000000000000000000000000000000000
--- a/Golden_Repo/i/intel-para/intel-para-2021.eb
+++ /dev/null
@@ -1,25 +0,0 @@
-easyblock = 'Toolchain'
-
-name = 'intel-para'
-version = '2021'
-versionsuffix = ''
-
-local_intelversion = '2021.2.0'
-local_intelsuffix = '-GCC-10.3.0'
-
-homepage = ''
-description = """intel-para provides Intel C/C++ and Fortran compilers, ParaStationMPI & Intel MKL.
-"""
-
-site_contacts = 'sc@fz-juelich.de'
-
-toolchain = SYSTEM
-
-dependencies = [
-    ('intel-compilers', local_intelversion, local_intelsuffix),
-    ('psmpi', '5.4.9-1', versionsuffix, ('intel-compilers', local_intelversion + local_intelsuffix)),
-    ('imkl', local_intelversion, versionsuffix, ('ipsmpi', version)),
-]
-
-
-moduleclass = 'toolchain'
diff --git a/Golden_Repo/i/intel/intel-2020.eb b/Golden_Repo/i/intel/intel-2020.eb
deleted file mode 100644
index 5af570d4a8162ec0574ca6029b99b04a6a5fcd65..0000000000000000000000000000000000000000
--- a/Golden_Repo/i/intel/intel-2020.eb
+++ /dev/null
@@ -1,22 +0,0 @@
-easyblock = 'Toolchain'
-
-name = 'intel'
-version = '2020'
-
-homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/'
-description = """Intel Cluster Toolkit Compiler Edition provides Intel C/C++ and Fortran compilers, Intel MPI & Intel MKL.
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_gcc_comp_ver = '-GCC-9.3.0' -local_int_ver = '2020.2.254' -dependencies = [ - ('iccifort', local_int_ver, local_gcc_comp_ver), - ('impi', '2019.8.254', '', ('iccifort', '2020.2.254-GCC-9.3.0')), - ('imkl', local_int_ver, '', ('iimpi', '2020')), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/intel/intel-2021.eb b/Golden_Repo/i/intel/intel-2021.eb deleted file mode 100644 index fac4a276ad6b83e7912d7e60c3964cd74c0a6a18..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/intel/intel-2021.eb +++ /dev/null @@ -1,22 +0,0 @@ -easyblock = 'Toolchain' - -name = 'intel' -version = '2021' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = """Intel Cluster Toolkit Compiler Edition provides Intel C/C++ and Fortran compilers, Intel MPI & Intel MKL. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_gcc_comp_ver = '-GCC-10.3.0' -local_int_ver = '2021.2.0' -dependencies = [ - ('intel-compilers', local_int_ver, local_gcc_comp_ver), - ('impi', '2021.2.0', '', ('intel-compilers', f'{local_int_ver}{local_gcc_comp_ver}')), - ('imkl', local_int_ver, '', ('iimpi', '2021')), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/intltool/intltool-0.51.0-GCCcore-10.3.0-Perl-5.32.0.eb b/Golden_Repo/i/intltool/intltool-0.51.0-GCCcore-10.3.0-Perl-5.32.0.eb deleted file mode 100644 index d627fa450255993959117876445969b7ce3e3527..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/intltool/intltool-0.51.0-GCCcore-10.3.0-Perl-5.32.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'intltool' -version = '0.51.0' -versionsuffix = '-Perl-%(perlver)s' - -homepage = 'http://freedesktop.org/wiki/Software/intltool/' -description = """intltool is a set of tools to centralize translation of - many different file formats using GNU gettext-compatible PO files. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://launchpad.net/intltool/trunk/%(version)s/+download/'] -sources = [SOURCE_TAR_GZ] - -patches = [ - 'intltool_perl_5.26.patch' -] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Perl', '5.32.0'), -] - -sanity_check_paths = { - 'files': ['bin/intltool%s' % x for x in ['-extract', '-merge', '-prepare', '-update', 'ize']], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/i/intltool/intltool-0.51.0-GCCcore-9.3.0-Perl-5.32.0.eb b/Golden_Repo/i/intltool/intltool-0.51.0-GCCcore-9.3.0-Perl-5.32.0.eb deleted file mode 100644 index 11be2a386028d73a51dbbc3cc9e9ce9a5abe0347..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/intltool/intltool-0.51.0-GCCcore-9.3.0-Perl-5.32.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'intltool' -version = '0.51.0' -versionsuffix = '-Perl-%(perlver)s' - -homepage = 'http://freedesktop.org/wiki/Software/intltool/' -description = """intltool is a set of tools to centralize translation of - many different file formats using GNU gettext-compatible PO files. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://launchpad.net/intltool/trunk/%(version)s/+download/'] -sources = [SOURCE_TAR_GZ] - -patches = [ - 'intltool_perl_5.26.patch' -] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Perl', '5.32.0'), -] - -sanity_check_paths = { - 'files': ['bin/intltool%s' % x for x in ['-extract', '-merge', '-prepare', '-update', 'ize']], - 'dirs': [] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/i/intltool/intltool_perl_5.26.patch b/Golden_Repo/i/intltool/intltool_perl_5.26.patch deleted file mode 100644 index 7f66aad0d230a367a04f4ececdf967223dca8280..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/intltool/intltool_perl_5.26.patch +++ /dev/null @@ -1,51 +0,0 @@ -diff -ruN intltool-0.51.0.orig/intltool-update.in intltool-0.51.0/intltool-update.in ---- intltool-0.51.0.orig/intltool-update.in 2015-03-09 02:39:54.000000000 +0100 -+++ intltool-0.51.0/intltool-update.in 2017-10-13 18:30:25.929546841 +0200 -@@ -1062,13 +1062,13 @@ - } - } - -- if ($str =~ /^(.*)\${?([A-Z_]+)}?(.*)$/) -+ if ($str =~ /^(.*)\$\{?([A-Z_]+)}?(.*)$/) - { - my $rest = $3; - my $untouched = $1; - my $sub = ""; - # Ignore recursive definitions of variables -- $sub = $varhash{$2} if defined $varhash{$2} and $varhash{$2} !~ /\${?$2}?/; -+ $sub = $varhash{$2} if defined $varhash{$2} and $varhash{$2} !~ /\$\{?$2}?/; - - return SubstituteVariable ("$untouched$sub$rest"); - } -@@ -1190,10 +1190,10 @@ - $name =~ s/\(+$//g; - $version =~ s/\(+$//g; - -- $varhash{"PACKAGE_NAME"} = $name if (not $name =~ /\${?AC_PACKAGE_NAME}?/); -- $varhash{"PACKAGE"} = $name if (not $name =~ /\${?PACKAGE}?/); -- $varhash{"PACKAGE_VERSION"} = $version if (not $name =~ /\${?AC_PACKAGE_VERSION}?/); -- $varhash{"VERSION"} = $version if (not $name =~ /\${?VERSION}?/); -+ $varhash{"PACKAGE_NAME"} = $name if (not $name =~ /\$\{?AC_PACKAGE_NAME}?/); -+ $varhash{"PACKAGE"} = $name if (not $name =~ /\$\{?PACKAGE}?/); -+ $varhash{"PACKAGE_VERSION"} = $version if (not $name =~ /\$\{?AC_PACKAGE_VERSION}?/); -+ $varhash{"VERSION"} = $version if (not $name =~ /\$\{?VERSION}?/); - } - - if ($conf_source =~ /^AC_INIT\(([^,\)]+),([^,\)]+)[,]?([^,\)]+)?/m) -@@ -1219,11 +1219,11 @@ - $version =~ s/\(+$//g; - $bugurl =~ s/\(+$//g if (defined $bugurl); - -- $varhash{"PACKAGE_NAME"} = $name if (not $name =~ /\${?AC_PACKAGE_NAME}?/); -- $varhash{"PACKAGE"} = $name if (not $name =~ /\${?PACKAGE}?/); -- $varhash{"PACKAGE_VERSION"} = $version if (not $name =~ /\${?AC_PACKAGE_VERSION}?/); -- $varhash{"VERSION"} = $version if (not $name =~ /\${?VERSION}?/); -- $varhash{"PACKAGE_BUGREPORT"} = $bugurl if (defined $bugurl and not $bugurl =~ /\${?\w+}?/); -+ $varhash{"PACKAGE_NAME"} = $name if (not $name =~ /\$\{?AC_PACKAGE_NAME}?/); -+ $varhash{"PACKAGE"} = $name if (not $name =~ /\$\{?PACKAGE}?/); -+ $varhash{"PACKAGE_VERSION"} = $version if (not $name =~ /\$\{?AC_PACKAGE_VERSION}?/); -+ $varhash{"VERSION"} = $version if (not $name =~ /\$\{?VERSION}?/); -+ $varhash{"PACKAGE_BUGREPORT"} = $bugurl if (defined $bugurl and not $bugurl =~ /\$\{?\w+}?/); - } - - # \s makes this not work, why? 
diff --git a/Golden_Repo/i/iomkl/iomkl-2020.eb b/Golden_Repo/i/iomkl/iomkl-2020.eb deleted file mode 100644 index 621f0a3589bb1f874d3931493717a0e9ca1589a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iomkl/iomkl-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'Toolchain' - -name = 'iomkl' -version = '2020' -versionsuffix = '' - -local_intelversion = '2020.2.254' -local_intelsuffix = '-GCC-9.3.0' - -homepage = '' -description = """iomkl provides Intel C/C++ and Fortran compilers, ParaStationMPI & Intel MKL. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -dependencies = [ - ('iccifort', local_intelversion, local_intelsuffix), - ('OpenMPI', '4.1.0rc1', versionsuffix, ('iccifort', local_intelversion + local_intelsuffix)), - ('imkl', local_intelversion, versionsuffix, ('iompi', version)), -] - - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/iomkl/iomkl-2021.eb b/Golden_Repo/i/iomkl/iomkl-2021.eb deleted file mode 100644 index db8b015ebdc64e7ed122631e3d36a6c43c2d6aff..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iomkl/iomkl-2021.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'Toolchain' - -name = 'iomkl' -version = '2021' -versionsuffix = '' - -local_intelversion = '2021.2.0' -local_intelsuffix = '-GCC-10.3.0' - -homepage = '' -description = """iomkl provides Intel C/C++ and Fortran compilers, ParaStationMPI & Intel MKL. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -dependencies = [ - ('intel-compilers', local_intelversion, local_intelsuffix), - ('OpenMPI', '4.1.1', versionsuffix, ('intel-compilers', local_intelversion + local_intelsuffix)), - ('imkl', local_intelversion, versionsuffix, ('iompi', version)), -] - - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/iompi/iompi-2020.eb b/Golden_Repo/i/iompi/iompi-2020.eb deleted file mode 100644 index aca9f01330beffb9715142bf80e900b494ae4202..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iompi/iompi-2020.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = "Toolchain" - -name = 'iompi' -version = '2020' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = """Intel C/C++ and Fortran compilers, alongside ParaStationMPI. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compver = '2020.2.254' -local_gccsuffix = '-GCC-9.3.0' -dependencies = [ - ('iccifort', '%s%s' % (local_compver, local_gccsuffix)), - ('OpenMPI', '4.1.0rc1', '', ('iccifort', '%s%s' % (local_compver, local_gccsuffix))), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/iompi/iompi-2021.eb b/Golden_Repo/i/iompi/iompi-2021.eb deleted file mode 100644 index 9b7e1d154827d46469fbd40283f9d47c44a0857f..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/iompi/iompi-2021.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = "Toolchain" - -name = 'iompi' -version = '2021' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = """Intel C/C++ and Fortran compilers, alongside ParaStationMPI. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compver = '2021.2.0' -local_gccsuffix = '-GCC-10.3.0' -dependencies = [ - ('intel-compilers', '%s%s' % (local_compver, local_gccsuffix)), - ('OpenMPI', '4.1.1', '', ('intel-compilers', '%s%s' % (local_compver, local_gccsuffix))), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/ipp/ipp-2020.3.304.eb b/Golden_Repo/i/ipp/ipp-2020.3.304.eb deleted file mode 100644 index 65e68f18ecfccf327b2ffa9b0142fd753e3e060b..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ipp/ipp-2020.3.304.eb +++ /dev/null @@ -1,25 +0,0 @@ -name = 'ipp' -version = '2020.3.304' - -homepage = 'http://software.intel.com/en-us/articles/intel-ipp/' -description = """Intel Integrated Performance Primitives (Intel IPP) is an extensive library - of multicore-ready, highly optimized software functions for multimedia, data processing, - and communications applications. Intel IPP offers thousands of optimized functions - covering frequently used fundamental algorithms. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['l_ipp_%(version)s.tgz'] - -dontcreateinstalldir = True - -requires_runtime_license = False - -modextravars = { - 'IPPROOT': '%(installdir)s/ipp', -} - -moduleclass = 'perf' diff --git a/Golden_Repo/i/ipsmpi/ipsmpi-2020-mt.eb b/Golden_Repo/i/ipsmpi/ipsmpi-2020-mt.eb deleted file mode 100644 index edead3eafb03e8f61c909ef28fd9d53decb95282..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ipsmpi/ipsmpi-2020-mt.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = "Toolchain" - -name = 'ipsmpi' -version = '2020' -versionsuffix = '-mt' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = 'Intel C/C++ and Fortran compilers, alongside ParaStationMPI.' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compver = '2020.2.254' -local_gccsuffix = '-GCC-9.3.0' -dependencies = [ - ('iccifort', '%s%s' % (local_compver, local_gccsuffix)), - ('psmpi', '5.4.7-1', versionsuffix, ('iccifort', '%s%s' % (local_compver, local_gccsuffix))), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/ipsmpi/ipsmpi-2020.eb b/Golden_Repo/i/ipsmpi/ipsmpi-2020.eb deleted file mode 100644 index 3880a90dcf4eb2b07d8c3ee81e90a5b74b6ad5db..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ipsmpi/ipsmpi-2020.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = "Toolchain" - -name = 'ipsmpi' -version = '2020' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = 'Intel C/C++ and Fortran compilers, alongside ParaStationMPI.' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compver = '2020.2.254' -local_gccsuffix = '-GCC-9.3.0' -dependencies = [ - ('iccifort', '%s%s' % (local_compver, local_gccsuffix)), - ('psmpi', '5.4.7-1', '', ('iccifort', '%s%s' % (local_compver, local_gccsuffix))), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/ipsmpi/ipsmpi-2021.eb b/Golden_Repo/i/ipsmpi/ipsmpi-2021.eb deleted file mode 100644 index 5ade94c781a64c5be44c555c1805822e27a20da5..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ipsmpi/ipsmpi-2021.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = "Toolchain" - -name = 'ipsmpi' -version = '2021' - -homepage = 'http://software.intel.com/en-us/intel-cluster-toolkit-compiler/' -description = 'Intel C/C++ and Fortran compilers, alongside ParaStationMPI.' 
- -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compver = '2021.2.0' -local_gccsuffix = '-GCC-10.3.0' -dependencies = [ - ('intel-compilers', '%s%s' % (local_compver, local_gccsuffix)), - ('psmpi', '5.4.9-1', '', ('intel-compilers', '%s%s' % (local_compver, local_gccsuffix))), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/i/ispc/ispc-1.12.0.eb b/Golden_Repo/i/ispc/ispc-1.12.0.eb deleted file mode 100644 index b822459e65bf70d1f8577d9b8c746c74014a0176..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ispc/ispc-1.12.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'Tarball' - -name = 'ispc' -version = '1.12.0' - -homepage = 'http://ispc.github.io/ , https://github.com/ispc/ispc/' -description = """Intel SPMD Program Compilers; An open-source compiler for high-performance - SIMD programming on the CPU. ispc is a compiler for a variant of the C programming language, - with extensions for 'single program, multiple data' (SPMD) programming. - Under the SPMD model, the programmer writes a program that generally appears - to be a regular serial program, though the execution model is actually that - a number of program instances execute in parallel on the hardware. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['ispc-v%(version)s-linux.tar.gz'] -source_urls = [('https://github.com/ispc/ispc/releases/download/v%(version)s')] - -sanity_check_paths = { - 'files': ["bin/ispc"], - 'dirs': [] -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/ispc/ispc-1.14.1.eb b/Golden_Repo/i/ispc/ispc-1.14.1.eb deleted file mode 100644 index ac3d431d3d9209dd01f1955fb3f63569a2ea326b..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/ispc/ispc-1.14.1.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'Tarball' - -name = 'ispc' -version = '1.14.1' - -homepage = 'http://ispc.github.io/ , https://github.com/ispc/ispc/' -description = """Intel SPMD Program Compilers; An open-source compiler for high-performance - SIMD programming on the CPU. ispc is a compiler for a variant of the C programming language, - with extensions for 'single program, multiple data' (SPMD) programming. - Under the SPMD model, the programmer writes a program that generally appears - to be a regular serial program, though the execution model is actually that - a number of program instances execute in parallel on the hardware. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['ispc-v%(version)s-linux.tar.gz'] -source_urls = [('https://github.com/ispc/ispc/releases/download/v%(version)s')] - -sanity_check_paths = { - 'files': ["bin/ispc"], - 'dirs': [] -} - -moduleclass = 'system' diff --git a/Golden_Repo/i/itac/itac-2020.2.031.eb b/Golden_Repo/i/itac/itac-2020.2.031.eb deleted file mode 100644 index c52ed1b9b8948ab00127f9f9bbf4ce489bf86814..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/itac/itac-2020.2.031.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'itac' -version = '2020.2.031' - -homepage = 'http://software.intel.com/en-us/intel-trace-analyzer/' -description = """The Intel Trace Collector is a low-overhead tracing library that performs - event-based tracing in applications. The Intel Trace Analyzer provides a convenient way to monitor application - activities gathered by the Intel Trace Collector through graphical displays. -""" - -usage = """ - Basic usage: - 1. module load itac - - Then either method 1: Static Linking - 2. Instrument application by linking with -L$VT_LIB_DIR -lVT $VT_ADD_LIBS - 3. 
Measure data by executing instrumented application - - or method 2: Library preloading - 2. export LD_PRELOAD=$VT_SLIB_DIR/libVT.so - 3. Measure data by executing uninstrumented application - - Afterwards - 4. traceanalyzer <exe>.stf -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['l_itac_p_%(version)s.tgz'] - -dontcreateinstalldir = 'True' - -preferredmpi = 'impi5' - -moduleclass = 'tools' diff --git a/Golden_Repo/i/itstool/itstool-2.0.6-GCCcore-10.3.0.eb b/Golden_Repo/i/itstool/itstool-2.0.6-GCCcore-10.3.0.eb deleted file mode 100644 index 1ac41b00a1693439a5262291485b0d1f93a53ee7..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/itstool/itstool-2.0.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'itstool' -version = '2.0.6' - -homepage = 'http://itstool.org/' -description = """ -ITS Tool allows you to translate your XML documents with PO files, using rules -from the W3C Internationalization Tag Set (ITS) to determine what to translate and how to separate -it into PO file messages. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = ['http://files.itstool.org/itstool'] -sources = [SOURCE_TAR_BZ2] - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('Python', '3.8.5'), - ('libxml2-python', '2.9.10', '-Python-%(pyver)s'), -] - -sanity_check_paths = { - 'files': ["bin/itstool"], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/i/itstool/itstool-2.0.6-GCCcore-9.3.0.eb b/Golden_Repo/i/itstool/itstool-2.0.6-GCCcore-9.3.0.eb deleted file mode 100644 index f89581c3b7d94dc79489d807a006ba6534e6a20b..0000000000000000000000000000000000000000 --- a/Golden_Repo/i/itstool/itstool-2.0.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'itstool' -version = '2.0.6' - -homepage = 'http://itstool.org/' -description = """ -ITS Tool allows you to translate your XML documents with PO files, using rules -from the W3C Internationalization Tag Set (ITS) to determine what to translate and how to separate -it into PO file messages. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = ['http://files.itstool.org/itstool'] -sources = [SOURCE_TAR_BZ2] - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('Python', '3.8.5'), - ('libxml2-python', '2.9.10', '-Python-%(pyver)s'), -] - -sanity_check_paths = { - 'files': ["bin/itstool"], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JAX/JAX-0.2.11-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/j/JAX/JAX-0.2.11-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index ccc70f39a221daed4b04cd70c33095a02768469b..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JAX/JAX-0.2.11-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,79 +0,0 @@ -import os -easyblock = 'PythonPackage' - -name = 'JAX' -version = '0.2.11' -versionsuffix = '-Python-%(pyver)s' -local_cudaver = '11.3' - -homepage = "https://github.com/google/jax" -description = """JAX is Autograd and XLA, brought together for high-performance machine learning research.""" - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -dependencies = [ - ('binutils', '2.36.1'), - ('CUDA', '11.3', '', SYSTEM), - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('cuDNN', '8.2.1.32', '-CUDA-%s' % local_cudaver, True), - ('libjpeg-turbo', '2.0.5'), -] - -github_account = 'google' -source_urls = [GITHUB_LOWER_SOURCE] -sources = ["%(namelower)s-v%(version)s.tar.gz"] -checksums = ['82505dca6c3171e0e5b6ba4dfe69e0322e27dc66e6bdd4a78e6119e3148ae413'] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'sanity_pip_check': True, -} - -use_pip = True - -exts_list = [ - ('flatbuffers', '1.12', { - 'source_tmpl': 'flatbuffers-%(version)s-py2.py3-none-any.whl', - 'unpack_sources': False, - 'checksums': ['9e9ef47fa92625c4721036e7c4124182668dc6021d9e7c73704edd395648deb9'], - }), - ('absl-py', '0.10.0', { - 'modulename': 'absl', - 'checksums': ['b20f504a7871a580be5268a18fbad48af4203df5d33dbc9272426cb806245a45'], - }), - ('jaxlib', '0.1.64', { - 'source_tmpl': 'jaxlib-0.1.64+cuda110-cp38-none-manylinux2010_x86_64.whl', - 'source_urls': ['https://storage.googleapis.com/jax-releases/cuda110/'], - 'unpack_sources': False, - 'checksums': ['36f20d9d54482da7239f38e59d5301a15a64a56fa35b6c8ba31f7871504dba2a'], - }), - ('opt_einsum', '3.3.0', { - 'source_tmpl': 'opt_einsum-%(version)s-py3-none-any.whl', - 'unpack_sources': False, - 'checksums': ['2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147'], - }), -] - -# This should be modextravars, but life is unfair -# Jaxlib searches in hardcoded paths for CUDA, or in those variables -modluafooter = """ -local cuda_root = os.getenv("EBROOTCUDA") or "" -setenv("CUDA_DIR", cuda_root) -setenv("XLA_FLAGS", "--xla_gpu_cuda_data_dir=" .. 
cuda_root) -""" - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/jax', - 'lib/python%(pyshortver)s/site-packages/jaxlib'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/j/JUBE/JUBE-2.4.0.eb b/Golden_Repo/j/JUBE/JUBE-2.4.0.eb deleted file mode 100644 index d3880ac492fa22127e275ad074c8f6ce2c16b8cc..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JUBE/JUBE-2.4.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = "VersionIndependentPythonPackage" - -name = "JUBE" -version = "2.4.0" - -homepage = "https://www.fz-juelich.de/jsc/jube" -description = """The JUBE benchmarking environment provides a script based -framework to easily create benchmark sets, run those sets on different -computer systems and evaluate the results. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://apps.fz-juelich.de/jsc/jube/jube2/download.php?file='] -sources = [SOURCE_TAR_GZ] -checksums = ['87c02555f3d1a8ecaff139cf8e7a7167cabd1049c8cc77f1bd8f4484e210d524'] - -options = {'modulename': 'jube2'} - -sanity_check_paths = { - 'files': ['bin/jube'], - 'dirs': [], -} - -modextrapaths = { - 'JUBE_INCLUDE_PATH': 'platform/slurm' -} - -modluafooter = 'execute {cmd=\'eval "$(jube complete)"\',modeA={"load"}}' - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JUBE/JUBE-2.4.1.eb b/Golden_Repo/j/JUBE/JUBE-2.4.1.eb deleted file mode 100644 index 9bfcf98a893774c196fc39d78abbf2bbb1ca8075..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JUBE/JUBE-2.4.1.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = "VersionIndependentPythonPackage" - -name = "JUBE" -version = "2.4.1" - -homepage = "https://www.fz-juelich.de/jsc/jube" -description = """The JUBE benchmarking environment provides a script based -framework to easily create benchmark sets, run those sets on different -computer systems and evaluate the results. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://apps.fz-juelich.de/jsc/jube/jube2/download.php?file='] -sources = [SOURCE_TAR_GZ] -checksums = ['d5d4a33fd339c7cd721a2836998605b9e492455c7bf755c64c7fd45e07be9016'] - -options = {'modulename': 'jube2'} - -sanity_check_paths = { - 'files': ['bin/jube'], - 'dirs': [], -} - -modextrapaths = { - 'JUBE_INCLUDE_PATH': 'platform/slurm' -} - -modluafooter = 'execute {cmd=\'eval "$(jube complete)"\',modeA={"load"}}' - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JasPer/JasPer-2.0.19-GCCcore-10.3.0.eb b/Golden_Repo/j/JasPer/JasPer-2.0.19-GCCcore-10.3.0.eb deleted file mode 100644 index 29d8ab000c03284b60a5606a115ccdc900ad71d8..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JasPer/JasPer-2.0.19-GCCcore-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'JasPer' -version = '2.0.19' - -homepage = 'http://www.ece.uvic.ca/~frodo/jasper/' -description = """The JasPer Project is an open-source initiative to provide a - free software-based reference implementation of the codec specified in the - JPEG-2000 Part-1 standard. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = ['version-%(version)s.tar.gz'] -source_urls = ['https://github.com/mdadams/jasper/archive/'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM) -] - -dependencies = [ - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libjpeg-turbo', '2.0.5'), -] - -separate_build_dir = True - -# For some reason on KNL it fails to build without this -preconfigopts = 'export LDFLAGS="$LDFLAGS -lGLU" && ' - -configopts = '-DJAS_ENABLE_AUTOMATIC_DEPENDENCIES=OFF -DJAS_ENABLE_DOC=OFF' - -sanity_check_paths = { - 'files': ["bin/jasper", "bin/jiv", "lib64/libjasper.so"], - 'dirs': ["include"], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/j/JasPer/JasPer-2.0.19-GCCcore-9.3.0.eb b/Golden_Repo/j/JasPer/JasPer-2.0.19-GCCcore-9.3.0.eb deleted file mode 100644 index 708c4f04c913dd32943ca0bc143e292e9f304e95..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JasPer/JasPer-2.0.19-GCCcore-9.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'JasPer' -version = '2.0.19' - -homepage = 'http://www.ece.uvic.ca/~frodo/jasper/' -description = """The JasPer Project is an open-source initiative to provide a - free software-based reference implementation of the codec specified in the - JPEG-2000 Part-1 standard. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = ['version-%(version)s.tar.gz'] -source_urls = ['https://github.com/mdadams/jasper/archive/'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0') -] - -dependencies = [ - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libjpeg-turbo', '2.0.5'), -] - -separate_build_dir = True - -# For some reason on KNL it fails to build without this -preconfigopts = 'export LDFLAGS="$LDFLAGS -lGLU" && ' - -configopts = '-DJAS_ENABLE_AUTOMATIC_DEPENDENCIES=OFF -DJAS_ENABLE_DOC=OFF' - -sanity_check_paths = { - 'files': ["bin/jasper", "bin/jiv", "lib64/libjasper.so"], - 'dirs': ["include"], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/j/Java/Java-1.8.eb b/Golden_Repo/j/Java/Java-1.8.eb deleted file mode 100644 index 13e98c19d6f431b26b312fa29e41a5b8197deb9e..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Java/Java-1.8.eb +++ /dev/null @@ -1,15 +0,0 @@ -easyblock = 'ModuleRC' - -name = 'Java' -version = '1.8' - -homepage = 'https://openjdk.java.net' -description = """Open JDK lets you develop and deploy Java applications on desktops and servers.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -dependencies = [('Java', '%(version)s_292-b10-OpenJDK')] - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Java/Java-1.8_292-b10-OpenJDK.eb b/Golden_Repo/j/Java/Java-1.8_292-b10-OpenJDK.eb deleted file mode 100644 index baeabcfd6e70844575444aff868b6f2bf679ec02..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Java/Java-1.8_292-b10-OpenJDK.eb +++ /dev/null @@ -1,21 +0,0 @@ -name = 'Java' -local_patch_version = '292' -local_java_version = '8' -local_openjdk_version = 'b10' -local_openj9_version = '0.26.0' -version = '1.%s_%s' % (local_java_version, local_patch_version) -versionsuffix = '-b10-OpenJDK' - -homepage = 'https://openjdk.java.net' -description = """Open JDK lets you develop and deploy Java applications on desktops and servers.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -source_urls 
= ['https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk%su%s-%s_openj9-%s' % - (local_java_version, local_patch_version, local_openjdk_version, local_openj9_version)] -sources = ['OpenJDK8U-jdk_x64_linux_openj9_%su%s%s_openj9-%s.tar.gz' % - (local_java_version, local_patch_version, local_openjdk_version, local_openj9_version)] - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Java/Java-15.0.1.eb b/Golden_Repo/j/Java/Java-15.0.1.eb deleted file mode 100644 index 8a385d353f128ab7b2d8d8bdff34c987fe6570ff..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Java/Java-15.0.1.eb +++ /dev/null @@ -1,16 +0,0 @@ -name = 'Java' -version = '15.0.1' - -homepage = 'http://openjdk.java.net' -description = """Java Platform, Standard Edition (Java SE) lets you develop and deploy - Java applications on desktops and servers.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://download.java.net/java/GA/jdk%(version)s/51f4f36ad4ef43e39d0dfdbaf6549e32/9/GPL/'] -sources = ['openjdk-%(version)s_linux-x64_bin.tar.gz'] -checksums = ['83ec3a7b1649a6b31e021cde1e58ab447b07fb8173489f27f427e731c89ed84a'] - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Java/Java-15.eb b/Golden_Repo/j/Java/Java-15.eb deleted file mode 100644 index 78c52a44db6d95cbedb8977958761b2c9773a3a6..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Java/Java-15.eb +++ /dev/null @@ -1,14 +0,0 @@ -easyblock = 'ModuleRC' - -name = 'Java' -version = '15' - -homepage = 'https://java.com/' -description = """Java Platform, Standard Edition (Java SE) lets you develop and deploy - Java applications on desktops and servers.""" - -toolchain = SYSTEM - -dependencies = [('Java', '%(version)s.0.1')] - -moduleclass = 'lang' diff --git a/Golden_Repo/j/JsonCpp/JsonCpp-1.9.4-GCCcore-9.3.0.eb b/Golden_Repo/j/JsonCpp/JsonCpp-1.9.4-GCCcore-9.3.0.eb deleted file mode 100644 index 5e800c1826ba78cd1cc01263423547f3a1c5237f..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JsonCpp/JsonCpp-1.9.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = "CMakeNinja" - -name = 'JsonCpp' -version = '1.9.4' - -homepage = 'https://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html' -description = """ JsonCpp is a C++ library that allows manipulating JSON values, - including serialization and deserialization to and from strings. It can also preserve existing comment in - unserialization/serialization steps, making it a convenient format to store user input files. 
""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/open-source-parsers/jsoncpp/archive'] -sources = ['%(version)s.tar.gz'] - - -builddependencies = [ - ('CMake', '3.18.0'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), - ('binutils', '2.34'), -] - -sanity_check_paths = { - 'files': ['include/json/json.h', 'lib64/libjsoncpp.so', 'lib64/libjsoncpp_static.a'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/j/Julia.CUDA/Julia.CUDA-gcccoremkl-9.3.0-2020.2.254.eb b/Golden_Repo/j/Julia.CUDA/Julia.CUDA-gcccoremkl-9.3.0-2020.2.254.eb deleted file mode 100644 index 06852dc04f2d3fdf24c25f842248648df4efcb3d..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia.CUDA/Julia.CUDA-gcccoremkl-9.3.0-2020.2.254.eb +++ /dev/null @@ -1,55 +0,0 @@ -easyblock = 'PackedBinary' -name = 'Julia.CUDA' -version = '2.0.2' - -homepage = 'https://juliagpu.gitlab.io/CUDA.jl' -description = """The CUDA.jl package is the main entrypoint for for programming NVIDIA GPUs using CUDA. -The package makes it possible to do so at various abstraction levels, -from easy-to-use arrays down to hand-written kernels using low-level CUDA APIs. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -sources = [{ - 'source_urls': ['https://github.com/JuliaGPU/CUDA.jl/archive/'], - 'filename': 'CUDA.jl-%(version)s.tar.gz', - 'download_filename': 'v%(version)s.tar.gz', - 'extract_cmd': "tar xfvz %s", -}] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Julia', '1.5.2', '', ('gcccoremkl', '9.3.0-2020.2.254')), - ('CUDA', '11.0', '', SYSTEM), -] - -extract_sources = True -install_cmd = ( - 'cd %(builddir)s/CUDA.jl-%(version)s/; ' - 'export JULIA_DEPOT_PATH=%(installdir)s; ' - '$EBROOTJULIA/bin/julia -e \'using Pkg; Pkg.add("CUDA"); Pkg.build("CUDA", verbose=true)\'; ' - '$EBROOTJULIA/bin/julia -e \'import CUDA\' ') - -modextrapaths = { - 'PATH': 'bin', - 'JULIA_DEPOT_PATH': '', -} - -# Ensure that the user-specific $HOME/.julia is always first entry in JULIA_DEPOT_PATH -modluafooter = """ -prepend_path("JULIA_DEPOT_PATH", pathJoin(os.getenv("HOME"), ".julia")) -add_property("arch","gpu") -""" - -sanity_check_paths = { - 'files': [], - 'dirs': ['registries', 'packages/CUDA'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Julia.MPI/Julia.MPI-0.15.1-gompi-2020.eb b/Golden_Repo/j/Julia.MPI/Julia.MPI-0.15.1-gompi-2020.eb deleted file mode 100644 index d62cc6a734b3f0806e4a7f88d668ad774982c647..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia.MPI/Julia.MPI-0.15.1-gompi-2020.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'PackedBinary' -name = 'Julia.MPI' -version = '0.15.1' - -homepage = 'https://juliaparallel.github.io/MPI.jl/stable/configuration/' -description = """This provides Julia interface to the Message Passing Interface (MPI), -roughly inspired by mpi4py. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -sources = [{ - 'source_urls': ['https://github.com/JuliaParallel/MPI.jl/archive/'], - 'filename': 'MPI.jl-%(version)s.tar.gz', - 'download_filename': 'v%(version)s.tar.gz', - 'extract_cmd': "tar xfvz %s", -}] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Julia', '1.5.2', '', ('gcccoremkl', '9.3.0-2020.2.254')), -] - -extract_sources = True -install_cmd = ( - 'cd %(builddir)s/MPI.jl-%(version)s/; ' - 'export JULIA_DEPOT_PATH=%(installdir)s; ' - '$EBROOTJULIA/bin/julia -e \'ENV["JULIA_MPI_BINARY"]="system"; using Pkg; Pkg.add("MPI"); Pkg.build("MPI")\'; ' - '$EBROOTJULIA/bin/julia -e \'import MPI; MPI.install_mpiexecjl()\' ') - -modextrapaths = { - 'PATH': 'bin', - 'JULIA_DEPOT_PATH': '', -} - -# Ensure that the user-specific $HOME/.julia is always first entry in JULIA_DEPOT_PATH -modluafooter = """ -prepend_path("JULIA_DEPOT_PATH", pathJoin(os.getenv("HOME"), ".julia")) -""" - -sanity_check_paths = { - 'files': ['bin/mpiexecjl'], - 'dirs': ['bin', 'registries', 'packages/MPI'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Julia.MPI/Julia.MPI-0.15.1-gpsmpi-2020.eb b/Golden_Repo/j/Julia.MPI/Julia.MPI-0.15.1-gpsmpi-2020.eb deleted file mode 100644 index e668e55625fb15b56856593aa9aae7f80bf92eb6..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia.MPI/Julia.MPI-0.15.1-gpsmpi-2020.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'PackedBinary' -name = 'Julia.MPI' -version = '0.15.1' - -homepage = 'https://juliaparallel.github.io/MPI.jl/stable/configuration/' -description = """This provides Julia interface to the Message Passing Interface (MPI), -roughly inspired by mpi4py. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -sources = [{ - 'source_urls': ['https://github.com/JuliaParallel/MPI.jl/archive/'], - 'filename': 'MPI.jl-%(version)s.tar.gz', - 'download_filename': 'v%(version)s.tar.gz', - 'extract_cmd': "tar xfvz %s", -}] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Julia', '1.5.2', '', ('gcccoremkl', '9.3.0-2020.2.254')), -] - -extract_sources = True -install_cmd = ( - 'cd %(builddir)s/MPI.jl-%(version)s/; ' - 'export JULIA_DEPOT_PATH=%(installdir)s; ' - '$EBROOTJULIA/bin/julia -e \'ENV["JULIA_MPI_BINARY"]="system"; using Pkg; Pkg.add("MPI"); Pkg.build("MPI")\'; ' - '$EBROOTJULIA/bin/julia -e \'import MPI; MPI.install_mpiexecjl()\' ') - -modextrapaths = { - 'PATH': 'bin', - 'JULIA_DEPOT_PATH': '', -} - -# Ensure that the user-specific $HOME/.julia is always first entry in JULIA_DEPOT_PATH -modluafooter = """ -prepend_path("JULIA_DEPOT_PATH", pathJoin(os.getenv("HOME"), ".julia")) -""" - -sanity_check_paths = { - 'files': ['bin/mpiexecjl'], - 'dirs': ['bin', 'registries', 'packages/MPI'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Julia/Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254.eb b/Golden_Repo/j/Julia/Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254.eb deleted file mode 100644 index 4abe1ba5c353839ced746c17fcb7f43e06004468..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia/Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' -name = 'Julia' -version = '1.5.2' - -homepage = 'https://julialang.org/' -description = """Julia was designed from the beginning for high performance. 
-Julia programs compile to efficient native code for multiple platforms via LLVM -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/JuliaLang/julia/releases/download/v%(version)s/'] -sources = ['julia-%(version)s-full.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('git', '2.28.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('GMP', '6.2.0'), -] - -osdependencies = [('openssl')] - -skipsteps = ['configure'] -buildopts = " USE_SYSTEM_GMP=1 USE_SYSTEM_CURL=1 USE_INTEL_MKL=1 " -installopts = "prefix=%(installdir)s " - -modextrapaths = { - 'PATH': 'bin', -} - -sanity_check_paths = { - 'files': ['bin/julia', 'include/julia/julia.h', 'lib/libjulia.so'], - 'dirs': ['bin', 'etc', 'include', 'lib', 'share'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Julia/Julia-1.6.1-gcccoremkl-10.3.0-2021.2.0.eb b/Golden_Repo/j/Julia/Julia-1.6.1-gcccoremkl-10.3.0-2021.2.0.eb deleted file mode 100644 index 030ad37c6ea11fbbd4023baf6d8ec550a0432337..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia/Julia-1.6.1-gcccoremkl-10.3.0-2021.2.0.eb +++ /dev/null @@ -1,186 +0,0 @@ -name = 'Julia' -version = '1.6.1' - -homepage = 'https://julialang.org/' -description = """Julia was designed from the beginning for high performance. -Julia programs compile to efficient native code for multiple platforms via LLVM -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True, 'verbose': True} # , 'usempi': True} - -source_urls = ['https://github.com/JuliaLang/julia/releases/download/v%(version)s/'] -sources = ['julia-%(version)s-full.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('git', '2.28.0'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Python', '3.8.5'), - ('GMP', '6.2.0'), - ('CUDA', '11.3', '', SYSTEM), - ('SciPy-Stack', '2021', '-Python-%(pyver)s'), - ('OpenGL', '2020'), -] - -osdependencies = [('openssl')] - -skipsteps = ['configure'] -buildopts = " USE_SYSTEM_GMP=1 USE_INTEL_MKL=1 " -installopts = "prefix=%(installdir)s " - -arch_name = 'gpu' - -exts_defaultclass = 'JuliaPackage' -exts_list = [ - # General Purpose - ('PackageCompiler.jl', '1.2.5', { - 'source_tmpl': 'v1.2.5.tar.gz', - 'source_urls': ['https://github.com/JuliaLang/PackageCompiler.jl/archive/'], - # 'packagespec': 'name="PackageCompiler", version="1.2.5"', - }), - ('HTTP.jl', '0.9.5', { - 'source_tmpl': 'v0.9.5.tar.gz', - 'source_urls': ['https://github.com/JuliaWeb/HTTP.jl/archive/'], - }), - ('Parsers.jl', '1.1.0', { - 'source_tmpl': 'v1.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Parsers.jl/archive/'], - }), - ('VersionParsing.jl', '1.2.0', { - 'source_tmpl': 'v1.2.0.tar.gz', - 'source_urls': ['https://github.com/JuliaInterop/VersionParsing.jl/archive/'], - }), - ('JSON.jl', '0.21.1', { - 'source_tmpl': 'v0.21.1.tar.gz', - 'source_urls': ['https://github.com/JuliaIO/JSON.jl/archive/'], - }), - ('WebIO.jl', '0.8.15', { - 'source_tmpl': 'v0.8.15.tar.gz', - 'source_urls': ['https://github.com/JuliaGizmos/WebIO.jl/archive/'], - }), - ('ProgressMeter.jl', '1.5.0', { - 'source_tmpl': 'v1.5.0.tar.gz', - 'source_urls': ['https://github.com/timholy/ProgressMeter.jl/archive/'], - }), - ('Conda.jl', '1.5.2', { - 'source_tmpl': 'v1.5.2.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/Conda.jl/archive/'], - }), - ('PyCall.jl', '1.92.3', { - 
'source_tmpl': 'v1.92.3.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyCall.jl/archive/'], - }), - ('LaTeXStrings.jl', '1.2.1', { - 'source_tmpl': 'v1.2.1.tar.gz', - 'source_urls': ['https://github.com/stevengj/LaTeXStrings.jl/archive/'], - }), - ('DocumentFormat.jl', '3.2.0', { - 'source_tmpl': 'v3.2.0.tar.gz', - 'source_urls': ['https://github.com/julia-vscode/DocumentFormat.jl/archive/'], - }), - # Data Science - ('CSV.jl', '0.8.4', { - 'source_tmpl': 'v0.8.4.tar.gz', - 'source_urls': ['https://github.com/JuliaData/CSV.jl/archive/'], - }), - ('DataFrames.jl', '0.21.8', { - 'source_tmpl': 'v0.21.8.tar.gz', - 'source_urls': ['https://github.com/JuliaData/DataFrames.jl/archive/'], - }), - ('Arrow.jl', '1.4.1', { - 'source_tmpl': 'v1.4.1.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Arrow.jl/archive/'], - }), - ('OnlineStats.jl', '1.5.8', { - 'source_tmpl': 'v1.5.8.tar.gz', - 'source_urls': ['https://github.com/joshday/OnlineStats.jl/archive/'], - }), - ('Query.jl', '1.0.0', { - 'source_tmpl': 'v1.0.0.tar.gz', - 'source_urls': ['https://github.com/queryverse/Query.jl/archive/'], - }), - # Scientific Domains - ('GSL.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/GSL.jl/archive/refs/tags/'], - }), - ('DifferentialEquations.jl', '6.16.0', { - 'source_tmpl': 'v6.16.0.tar.gz', - 'source_urls': ['https://github.com/SciML/DifferentialEquations.jl/archive/'], - }), - ('Distributions.jl', '0.24.18', { - 'source_tmpl': 'v0.24.18.tar.gz', - 'source_urls': ['https://github.com/JuliaStats/Distributions.jl/archive/'], - }), - ('Optim.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaNLSolvers/Optim.jl/archive/'], - }), - ('IterativeSolvers.jl', '0.9.0', { - 'source_tmpl': 'v0.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaLinearAlgebra/IterativeSolvers.jl/archive/'], - }), - ('AbstractFFTs.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/AbstractFFTs.jl/archive/'], - }), - ('OrdinaryDiffEq.jl', '5.52.7', { - 'source_tmpl': 'v5.52.7.tar.gz', - 'source_urls': ['https://github.com/SciML/OrdinaryDiffEq.jl/archive/'], - }), - ('SpecialFunctions.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/SpecialFunctions.jl/archive/'], - }), - ('JuMP.jl', '0.21.7', { - 'source_tmpl': 'v0.21.7.tar.gz', - 'source_urls': ['https://github.com/jump-dev/JuMP.jl/archive/'], - }), - # Visualization - ('GR.jl', '0.57.4', { - 'source_tmpl': 'v0.57.4.tar.gz', - 'source_urls': ['https://github.com/jheinen/GR.jl/archive/'], - }), - ('PlotlyJS.jl', '0.14.1', { - 'source_tmpl': 'v0.14.1.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/PlotlyJS.jl/archive/'], - }), - ('PyPlot.jl', '2.9.0', { - 'source_tmpl': 'v2.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyPlot.jl/archive/'], - }), - ('Plots.jl', '1.12.0', { - 'source_tmpl': 'v1.12.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/Plots.jl/archive/'], - }), - ('UnicodePlots.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/Evizero/UnicodePlots.jl/archive/'], - }), - ('StatsPlots.jl', '0.14.19', { - 'source_tmpl': 'v0.14.19.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/StatsPlots.jl/archive/'], - }), - # CUDA - ('CUDA.jl', '3.1.0', { - 'source_tmpl': 'v3.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaGPU/CUDA.jl/archive/'] - }), -] - -modextravars = { - 
'JULIA_CUDA_USE_BINARYBUILDER': 'false', -} - -sanity_check_paths = { - 'files': ['bin/julia', 'include/julia/julia.h', 'lib/libjulia.so'], - 'dirs': ['bin', 'etc', 'include', 'lib', 'share'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Julia/Julia-1.6.1-gcccoremkl-9.3.0-2020.2.254.eb b/Golden_Repo/j/Julia/Julia-1.6.1-gcccoremkl-9.3.0-2020.2.254.eb deleted file mode 100644 index 147f4a0919dd937b4d3d520d428622e6fff4e657..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia/Julia-1.6.1-gcccoremkl-9.3.0-2020.2.254.eb +++ /dev/null @@ -1,186 +0,0 @@ -name = 'Julia' -version = '1.6.1' - -homepage = 'https://julialang.org/' -description = """Julia was designed from the beginning for high performance. -Julia programs compile to efficient native code for multiple platforms via LLVM -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True, 'verbose': True} # , 'usempi': True} - -source_urls = ['https://github.com/JuliaLang/julia/releases/download/v%(version)s/'] -sources = ['julia-%(version)s-full.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('git', '2.28.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('GMP', '6.2.0'), - ('CUDA', '11.0', '', SYSTEM), - ('SciPy-Stack', '2020', '-Python-%(pyver)s'), - ('OpenGL', '2020'), -] - -osdependencies = [('openssl')] - -skipsteps = ['configure'] -buildopts = " USE_SYSTEM_GMP=1 USE_INTEL_MKL=1 " -installopts = "prefix=%(installdir)s " - -arch_name = 'gpu' - -exts_defaultclass = 'JuliaPackage' -exts_list = [ - # General Purpose - ('PackageCompiler.jl', '1.2.5', { - 'source_tmpl': 'v1.2.5.tar.gz', - 'source_urls': ['https://github.com/JuliaLang/PackageCompiler.jl/archive/'], - # 'packagespec': 'name="PackageCompiler", version="1.2.5"', - }), - ('HTTP.jl', '0.9.5', { - 'source_tmpl': 'v0.9.5.tar.gz', - 'source_urls': ['https://github.com/JuliaWeb/HTTP.jl/archive/'], - }), - ('Parsers.jl', '1.1.0', { - 'source_tmpl': 'v1.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Parsers.jl/archive/'], - }), - ('VersionParsing.jl', '1.2.0', { - 'source_tmpl': 'v1.2.0.tar.gz', - 'source_urls': ['https://github.com/JuliaInterop/VersionParsing.jl/archive/'], - }), - ('JSON.jl', '0.21.1', { - 'source_tmpl': 'v0.21.1.tar.gz', - 'source_urls': ['https://github.com/JuliaIO/JSON.jl/archive/'], - }), - ('WebIO.jl', '0.8.15', { - 'source_tmpl': 'v0.8.15.tar.gz', - 'source_urls': ['https://github.com/JuliaGizmos/WebIO.jl/archive/'], - }), - ('ProgressMeter.jl', '1.5.0', { - 'source_tmpl': 'v1.5.0.tar.gz', - 'source_urls': ['https://github.com/timholy/ProgressMeter.jl/archive/'], - }), - ('Conda.jl', '1.5.2', { - 'source_tmpl': 'v1.5.2.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/Conda.jl/archive/'], - }), - ('PyCall.jl', '1.92.3', { - 'source_tmpl': 'v1.92.3.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyCall.jl/archive/'], - }), - ('LaTeXStrings.jl', '1.2.1', { - 'source_tmpl': 'v1.2.1.tar.gz', - 'source_urls': ['https://github.com/stevengj/LaTeXStrings.jl/archive/'], - }), - ('DocumentFormat.jl', '3.2.0', { - 'source_tmpl': 'v3.2.0.tar.gz', - 'source_urls': ['https://github.com/julia-vscode/DocumentFormat.jl/archive/'], - }), - # Data Science - ('CSV.jl', '0.8.4', { - 'source_tmpl': 'v0.8.4.tar.gz', - 'source_urls': ['https://github.com/JuliaData/CSV.jl/archive/'], - }), - ('DataFrames.jl', '0.21.8', { - 'source_tmpl': 'v0.21.8.tar.gz', - 'source_urls': 
['https://github.com/JuliaData/DataFrames.jl/archive/'], - }), - ('Arrow.jl', '1.4.1', { - 'source_tmpl': 'v1.4.1.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Arrow.jl/archive/'], - }), - ('OnlineStats.jl', '1.5.8', { - 'source_tmpl': 'v1.5.8.tar.gz', - 'source_urls': ['https://github.com/joshday/OnlineStats.jl/archive/'], - }), - ('Query.jl', '1.0.0', { - 'source_tmpl': 'v1.0.0.tar.gz', - 'source_urls': ['https://github.com/queryverse/Query.jl/archive/'], - }), - # Scientific Domains - ('GSL.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/GSL.jl/archive/refs/tags/'], - }), - ('DifferentialEquations.jl', '6.16.0', { - 'source_tmpl': 'v6.16.0.tar.gz', - 'source_urls': ['https://github.com/SciML/DifferentialEquations.jl/archive/'], - }), - ('Distributions.jl', '0.24.18', { - 'source_tmpl': 'v0.24.18.tar.gz', - 'source_urls': ['https://github.com/JuliaStats/Distributions.jl/archive/'], - }), - ('Optim.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaNLSolvers/Optim.jl/archive/'], - }), - ('IterativeSolvers.jl', '0.9.0', { - 'source_tmpl': 'v0.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaLinearAlgebra/IterativeSolvers.jl/archive/'], - }), - ('AbstractFFTs.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/AbstractFFTs.jl/archive/'], - }), - ('OrdinaryDiffEq.jl', '5.52.7', { - 'source_tmpl': 'v5.52.7.tar.gz', - 'source_urls': ['https://github.com/SciML/OrdinaryDiffEq.jl/archive/'], - }), - ('SpecialFunctions.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/SpecialFunctions.jl/archive/'], - }), - ('JuMP.jl', '0.21.7', { - 'source_tmpl': 'v0.21.7.tar.gz', - 'source_urls': ['https://github.com/jump-dev/JuMP.jl/archive/'], - }), - # Visualization - ('GR.jl', '0.57.4', { - 'source_tmpl': 'v0.57.4.tar.gz', - 'source_urls': ['https://github.com/jheinen/GR.jl/archive/'], - }), - ('PlotlyJS.jl', '0.14.1', { - 'source_tmpl': 'v0.14.1.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/PlotlyJS.jl/archive/'], - }), - ('PyPlot.jl', '2.9.0', { - 'source_tmpl': 'v2.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyPlot.jl/archive/'], - }), - ('Plots.jl', '1.12.0', { - 'source_tmpl': 'v1.12.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/Plots.jl/archive/'], - }), - ('UnicodePlots.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/Evizero/UnicodePlots.jl/archive/'], - }), - ('StatsPlots.jl', '0.14.19', { - 'source_tmpl': 'v0.14.19.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/StatsPlots.jl/archive/'], - }), - # CUDA - ('CUDA.jl', '3.1.0', { - 'source_tmpl': 'v3.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaGPU/CUDA.jl/archive/'] - }), -] - -modextravars = { - 'JULIA_CUDA_USE_BINARYBUILDER': 'false', -} - -sanity_check_paths = { - 'files': ['bin/julia', 'include/julia/julia.h', 'lib/libjulia.so'], - 'dirs': ['bin', 'etc', 'include', 'lib', 'share'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Julia/Julia-1.6.1-gomkl-2020.eb b/Golden_Repo/j/Julia/Julia-1.6.1-gomkl-2020.eb deleted file mode 100644 index f8fe254c8d33182905f0b86c74b03d40d3a049c5..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia/Julia-1.6.1-gomkl-2020.eb +++ /dev/null @@ -1,199 +0,0 @@ -name = 'Julia' -version = '1.6.1' - -homepage = 'https://julialang.org/' -description = """Julia was designed from the beginning for high 
performance. -Julia programs compile to efficient native code for multiple platforms via LLVM -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://github.com/JuliaLang/julia/releases/download/v%(version)s/'] -sources = ['julia-%(version)s-full.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('git', '2.28.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('GMP', '6.2.0'), - ('CUDA', '11.0', '', SYSTEM), - ('SciPy-Stack', '2020', '-Python-%(pyver)s', ('gcccoremkl', '9.3.0-2020.2.254')), - ('OpenGL', '2020'), -] - -osdependencies = [('openssl')] - -skipsteps = ['configure'] -buildopts = " USE_SYSTEM_GMP=1 USE_INTEL_MKL=1 " -installopts = "prefix=%(installdir)s " - -arch_name = 'gpu' - -exts_defaultclass = 'JuliaPackage' -exts_list = [ - # General Purpose - ('PackageCompiler.jl', '1.2.5', { - 'source_tmpl': 'v1.2.5.tar.gz', - 'source_urls': ['https://github.com/JuliaLang/PackageCompiler.jl/archive/'], - # 'packagespec': 'name="PackageCompiler", version="1.2.5"', - }), - ('HTTP.jl', '0.9.5', { - 'source_tmpl': 'v0.9.5.tar.gz', - 'source_urls': ['https://github.com/JuliaWeb/HTTP.jl/archive/'], - }), - ('Parsers.jl', '1.1.0', { - 'source_tmpl': 'v1.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Parsers.jl/archive/'], - }), - ('VersionParsing.jl', '1.2.0', { - 'source_tmpl': 'v1.2.0.tar.gz', - 'source_urls': ['https://github.com/JuliaInterop/VersionParsing.jl/archive/'], - }), - ('JSON.jl', '0.21.1', { - 'source_tmpl': 'v0.21.1.tar.gz', - 'source_urls': ['https://github.com/JuliaIO/JSON.jl/archive/'], - }), - ('WebIO.jl', '0.8.15', { - 'source_tmpl': 'v0.8.15.tar.gz', - 'source_urls': ['https://github.com/JuliaGizmos/WebIO.jl/archive/'], - }), - ('ProgressMeter.jl', '1.5.0', { - 'source_tmpl': 'v1.5.0.tar.gz', - 'source_urls': ['https://github.com/timholy/ProgressMeter.jl/archive/'], - }), - ('Conda.jl', '1.5.2', { - 'source_tmpl': 'v1.5.2.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/Conda.jl/archive/'], - }), - ('PyCall.jl', '1.92.3', { - 'source_tmpl': 'v1.92.3.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyCall.jl/archive/'], - }), - ('LaTeXStrings.jl', '1.2.1', { - 'source_tmpl': 'v1.2.1.tar.gz', - 'source_urls': ['https://github.com/stevengj/LaTeXStrings.jl/archive/'], - }), - ('DocumentFormat.jl', '3.2.0', { - 'source_tmpl': 'v3.2.0.tar.gz', - 'source_urls': ['https://github.com/julia-vscode/DocumentFormat.jl/archive/'], - }), - # Data Science - ('CSV.jl', '0.8.4', { - 'source_tmpl': 'v0.8.4.tar.gz', - 'source_urls': ['https://github.com/JuliaData/CSV.jl/archive/'], - }), - ('DataFrames.jl', '0.21.8', { - 'source_tmpl': 'v0.21.8.tar.gz', - 'source_urls': ['https://github.com/JuliaData/DataFrames.jl/archive/'], - }), - ('Arrow.jl', '1.4.1', { - 'source_tmpl': 'v1.4.1.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Arrow.jl/archive/'], - }), - ('OnlineStats.jl', '1.5.8', { - 'source_tmpl': 'v1.5.8.tar.gz', - 'source_urls': ['https://github.com/joshday/OnlineStats.jl/archive/'], - }), - ('Query.jl', '1.0.0', { - 'source_tmpl': 'v1.0.0.tar.gz', - 'source_urls': ['https://github.com/queryverse/Query.jl/archive/'], - }), - # Scientific Domains - ('GSL.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/GSL.jl/archive/refs/tags/'], - }), - ('DifferentialEquations.jl', '6.16.0', { - 'source_tmpl': 'v6.16.0.tar.gz', - 'source_urls': 
['https://github.com/SciML/DifferentialEquations.jl/archive/'], - }), - ('Distributions.jl', '0.24.18', { - 'source_tmpl': 'v0.24.18.tar.gz', - 'source_urls': ['https://github.com/JuliaStats/Distributions.jl/archive/'], - }), - ('Optim.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaNLSolvers/Optim.jl/archive/'], - }), - ('IterativeSolvers.jl', '0.9.0', { - 'source_tmpl': 'v0.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaLinearAlgebra/IterativeSolvers.jl/archive/'], - }), - ('AbstractFFTs.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/AbstractFFTs.jl/archive/'], - }), - ('OrdinaryDiffEq.jl', '5.52.7', { - 'source_tmpl': 'v5.52.7.tar.gz', - 'source_urls': ['https://github.com/SciML/OrdinaryDiffEq.jl/archive/'], - }), - ('SpecialFunctions.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/SpecialFunctions.jl/archive/'], - }), - ('JuMP.jl', '0.21.7', { - 'source_tmpl': 'v0.21.7.tar.gz', - 'source_urls': ['https://github.com/jump-dev/JuMP.jl/archive/'], - }), - # Visualization - ('GR.jl', '0.57.4', { - 'source_tmpl': 'v0.57.4.tar.gz', - 'source_urls': ['https://github.com/jheinen/GR.jl/archive/'], - }), - ('PlotlyJS.jl', '0.14.1', { - 'source_tmpl': 'v0.14.1.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/PlotlyJS.jl/archive/'], - }), - ('PyPlot.jl', '2.9.0', { - 'source_tmpl': 'v2.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyPlot.jl/archive/'], - }), - ('Plots.jl', '1.12.0', { - 'source_tmpl': 'v1.12.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/Plots.jl/archive/'], - }), - ('UnicodePlots.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/Evizero/UnicodePlots.jl/archive/'], - }), - ('StatsPlots.jl', '0.14.19', { - 'source_tmpl': 'v0.14.19.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/StatsPlots.jl/archive/'], - }), - # MPI - ('MPI.jl', '0.17.2', { - 'mpiexec': 'srun', - 'mpi_path': '$EBROOTOPENMPI', - 'source_tmpl': 'v0.17.2.tar.gz', - 'source_urls': ['https://github.com/JuliaParallel/MPI.jl/archive/'], - }), - # CUDA - ('CUDA.jl', '3.1.0', { - 'source_tmpl': 'v3.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaGPU/CUDA.jl/archive/'] - }), -] - -modextravars = { - 'JULIA_MPICC': 'mpicc', - 'JULIA_MPIEXEC': 'srun', - # 'JULIA_MPIEXEC_ARGS': '', - 'JULIA_MPI_ABI': 'OpenMPI', - 'JULIA_MPI_BINARY': 'system', - 'JULIA_MPI_PATH': '$::env(EBROOTOPENMPI)', - 'JULIA_CUDA_USE_BINARYBUILDER': 'false', -} - -sanity_check_paths = { - 'files': ['bin/julia', 'include/julia/julia.h', 'lib/libjulia.so'], - 'dirs': ['bin', 'etc', 'include', 'lib', 'share'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Julia/Julia-1.6.1-gomkl-2021.eb b/Golden_Repo/j/Julia/Julia-1.6.1-gomkl-2021.eb deleted file mode 100644 index 555347b99c8dec63f9ed7c0393993189808ef7c7..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Julia/Julia-1.6.1-gomkl-2021.eb +++ /dev/null @@ -1,199 +0,0 @@ -name = 'Julia' -version = '1.6.1' - -homepage = 'https://julialang.org/' -description = """Julia was designed from the beginning for high performance. 
-Julia programs compile to efficient native code for multiple platforms via LLVM -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://github.com/JuliaLang/julia/releases/download/v%(version)s/'] -sources = ['julia-%(version)s-full.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('git', '2.28.0'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Python', '3.8.5'), - ('GMP', '6.2.0'), - ('CUDA', '11.3', '', SYSTEM), - ('SciPy-Stack', '2021', '-Python-%(pyver)s', ('gcccoremkl', '10.3.0-2021.2.0')), - ('OpenGL', '2020'), -] - -osdependencies = [('openssl')] - -skipsteps = ['configure'] -buildopts = " USE_SYSTEM_GMP=1 USE_INTEL_MKL=1 " -installopts = "prefix=%(installdir)s " - -arch_name = 'gpu' - -exts_defaultclass = 'JuliaPackage' -exts_list = [ - # General Purpose - ('PackageCompiler.jl', '1.2.5', { - 'source_tmpl': 'v1.2.5.tar.gz', - 'source_urls': ['https://github.com/JuliaLang/PackageCompiler.jl/archive/'], - # 'packagespec': 'name="PackageCompiler", version="1.2.5"', - }), - ('HTTP.jl', '0.9.5', { - 'source_tmpl': 'v0.9.5.tar.gz', - 'source_urls': ['https://github.com/JuliaWeb/HTTP.jl/archive/'], - }), - ('Parsers.jl', '1.1.0', { - 'source_tmpl': 'v1.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Parsers.jl/archive/'], - }), - ('VersionParsing.jl', '1.2.0', { - 'source_tmpl': 'v1.2.0.tar.gz', - 'source_urls': ['https://github.com/JuliaInterop/VersionParsing.jl/archive/'], - }), - ('JSON.jl', '0.21.1', { - 'source_tmpl': 'v0.21.1.tar.gz', - 'source_urls': ['https://github.com/JuliaIO/JSON.jl/archive/'], - }), - ('WebIO.jl', '0.8.15', { - 'source_tmpl': 'v0.8.15.tar.gz', - 'source_urls': ['https://github.com/JuliaGizmos/WebIO.jl/archive/'], - }), - ('ProgressMeter.jl', '1.5.0', { - 'source_tmpl': 'v1.5.0.tar.gz', - 'source_urls': ['https://github.com/timholy/ProgressMeter.jl/archive/'], - }), - ('Conda.jl', '1.5.2', { - 'source_tmpl': 'v1.5.2.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/Conda.jl/archive/'], - }), - ('PyCall.jl', '1.92.3', { - 'source_tmpl': 'v1.92.3.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyCall.jl/archive/'], - }), - ('LaTeXStrings.jl', '1.2.1', { - 'source_tmpl': 'v1.2.1.tar.gz', - 'source_urls': ['https://github.com/stevengj/LaTeXStrings.jl/archive/'], - }), - ('DocumentFormat.jl', '3.2.0', { - 'source_tmpl': 'v3.2.0.tar.gz', - 'source_urls': ['https://github.com/julia-vscode/DocumentFormat.jl/archive/'], - }), - # Data Science - ('CSV.jl', '0.8.4', { - 'source_tmpl': 'v0.8.4.tar.gz', - 'source_urls': ['https://github.com/JuliaData/CSV.jl/archive/'], - }), - ('DataFrames.jl', '0.21.8', { - 'source_tmpl': 'v0.21.8.tar.gz', - 'source_urls': ['https://github.com/JuliaData/DataFrames.jl/archive/'], - }), - ('Arrow.jl', '1.4.1', { - 'source_tmpl': 'v1.4.1.tar.gz', - 'source_urls': ['https://github.com/JuliaData/Arrow.jl/archive/'], - }), - ('OnlineStats.jl', '1.5.8', { - 'source_tmpl': 'v1.5.8.tar.gz', - 'source_urls': ['https://github.com/joshday/OnlineStats.jl/archive/'], - }), - ('Query.jl', '1.0.0', { - 'source_tmpl': 'v1.0.0.tar.gz', - 'source_urls': ['https://github.com/queryverse/Query.jl/archive/'], - }), - # Scientific Domains - ('GSL.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/GSL.jl/archive/refs/tags/'], - }), - ('DifferentialEquations.jl', '6.16.0', { - 'source_tmpl': 'v6.16.0.tar.gz', - 'source_urls': 
['https://github.com/SciML/DifferentialEquations.jl/archive/'], - }), - ('Distributions.jl', '0.24.18', { - 'source_tmpl': 'v0.24.18.tar.gz', - 'source_urls': ['https://github.com/JuliaStats/Distributions.jl/archive/'], - }), - ('Optim.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaNLSolvers/Optim.jl/archive/'], - }), - ('IterativeSolvers.jl', '0.9.0', { - 'source_tmpl': 'v0.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaLinearAlgebra/IterativeSolvers.jl/archive/'], - }), - ('AbstractFFTs.jl', '1.0.1', { - 'source_tmpl': 'v1.0.1.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/AbstractFFTs.jl/archive/'], - }), - ('OrdinaryDiffEq.jl', '5.52.7', { - 'source_tmpl': 'v5.52.7.tar.gz', - 'source_urls': ['https://github.com/SciML/OrdinaryDiffEq.jl/archive/'], - }), - ('SpecialFunctions.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/JuliaMath/SpecialFunctions.jl/archive/'], - }), - ('JuMP.jl', '0.21.7', { - 'source_tmpl': 'v0.21.7.tar.gz', - 'source_urls': ['https://github.com/jump-dev/JuMP.jl/archive/'], - }), - # Visualization - ('GR.jl', '0.57.4', { - 'source_tmpl': 'v0.57.4.tar.gz', - 'source_urls': ['https://github.com/jheinen/GR.jl/archive/'], - }), - ('PlotlyJS.jl', '0.14.1', { - 'source_tmpl': 'v0.14.1.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/PlotlyJS.jl/archive/'], - }), - ('PyPlot.jl', '2.9.0', { - 'source_tmpl': 'v2.9.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPy/PyPlot.jl/archive/'], - }), - ('Plots.jl', '1.12.0', { - 'source_tmpl': 'v1.12.0.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/Plots.jl/archive/'], - }), - ('UnicodePlots.jl', '1.3.0', { - 'source_tmpl': 'v1.3.0.tar.gz', - 'source_urls': ['https://github.com/Evizero/UnicodePlots.jl/archive/'], - }), - ('StatsPlots.jl', '0.14.19', { - 'source_tmpl': 'v0.14.19.tar.gz', - 'source_urls': ['https://github.com/JuliaPlots/StatsPlots.jl/archive/'], - }), - # MPI - ('MPI.jl', '0.17.2', { - 'mpiexec': 'srun', - 'mpi_path': '$EBROOTOPENMPI', - 'source_tmpl': 'v0.17.2.tar.gz', - 'source_urls': ['https://github.com/JuliaParallel/MPI.jl/archive/'], - }), - # CUDA - ('CUDA.jl', '3.1.0', { - 'source_tmpl': 'v3.1.0.tar.gz', - 'source_urls': ['https://github.com/JuliaGPU/CUDA.jl/archive/'] - }), -] - -modextravars = { - 'JULIA_MPICC': 'mpicc', - 'JULIA_MPIEXEC': 'srun', - # 'JULIA_MPIEXEC_ARGS': '', - 'JULIA_MPI_ABI': 'OpenMPI', - 'JULIA_MPI_BINARY': 'system', - 'JULIA_MPI_PATH': '$::env(EBROOTOPENMPI)', - 'JULIA_CUDA_USE_BINARYBUILDER': 'false', -} - -sanity_check_paths = { - 'files': ['bin/julia', 'include/julia/julia.h', 'lib/libjulia.so'], - 'dirs': ['bin', 'etc', 'include', 'lib', 'share'] -} - -moduleclass = 'lang' diff --git a/Golden_Repo/j/Jupyter/401html.patch b/Golden_Repo/j/Jupyter/401html.patch deleted file mode 100644 index c3e71a800541a396377737132022c67a077d7564..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/401html.patch +++ /dev/null @@ -1,135 +0,0 @@ -diff -Naur jupyterlab-2.2.9.orig/401.html jupyterlab-2.2.9/401.html ---- jupyterlab-2.2.9.orig/401.html 1970-01-01 01:00:00.000000000 +0100 -+++ jupyterlab-2.2.9/401.html 2020-12-11 23:24:45.301738818 +0100 -@@ -0,0 +1,131 @@ -+<!DOCTYPE html> -+<html><head> -+ <meta http-equiv="Refresh" content="0; url=https://jupyter-jsc.fz-juelich.de/hub/logout?stopall=false&alldevices=false" /> -+ -+ <meta http-equiv="content-type" content="text/html; charset=UTF-8"> -+ <meta charset="utf-8"> -+ -+ <title>jupyter-jsc</title> -+ 
<meta http-equiv="X-UA-Compatible" content="chrome=1"> -+ <meta property="og:image" content="/hub/static/images/mini_website.jpg"> -+ <meta property="og:locale" content="en_US"> -+ <meta property="og:site_name" content="jupyter-jsc"> -+ <meta property="og:title" content="jupyter-jsc"> -+ <meta property="og:type" content="website"> -+ <meta property="og:url" content="https://jupyter-jsc.fz-juelich.de/"> -+ -+ <link rel="stylesheet" href="/hub/static/css/style.min.css" type="text/css"> -+ <link rel="stylesheet" href="/hub/static/css/j4j_font.min.htm" type="text/css"> -+ <link rel="stylesheet" href="/hub/static/css/j4j_base.min.css" type="text/css"> -+ <link rel="stylesheet" href="/hub/static/css/j4j_base_header.min.css" type="text/css"> -+ <link rel="stylesheet" href="/hub/static/css/j4j_base_footer.min.css" type="text/css"> -+ <link rel="icon" href="/hub//static/images/favicon.svg" type="jpg/png"> -+ <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/jquery-confirm/3.3.2/jquery-confirm.min.css"> -+ <link rel="stylesheet" href="/hub/static/css/j4j_page_home.min.css" type="text/css"> -+ <link rel="stylesheet" href="/hub/static/css/spawn_style.css" type="text/css"> -+ -+<body> -+ -+<div id="container"> -+ -+ <div id="header-background"> -+ <div id="header"> -+ <nav class="navbar navbar-default"> -+ <div class="container-fluid"> -+ <div class="navbar-header"> -+ <span id="jupyterhub-logo" class="pull-left"><a href="https://www.fz-juelich.de/jsc" target="_blank"><img src="/hub/static/images/jsc.png" alt="JupyterHub" class="jpy-logo" title="Home"></a></span> -+ </div> -+ -+ <div id="thenavbar"> -+ <ul class="nav navbar-nav"> -+ -+ <li><a href="https://jupyter-jsc.fz-juelich.de/hub/start">Start</a></li> -+ -+ <li id="navbarbtn-links" class="main-menu-btn menu-btn"><a>Links</a> -+ <div id="navbarmenu-links" class="menu-box"> -+ <ul> -+ <li id="navbarbtn-links-1" class="menu-btn"><a>jupyter-jsc</a> -+ <div id="navbarmenu-links-1" class="menu-box menu-sub-box show-sub-header" style=""> -+ <ul> -+ <li class=""><a href="https://jupyter-jsc.fz-juelich.de/nbviewer/github/kreuzert/Jupyter-JSC/blob/master/Extensions.ipynb">Extensions at jupyter-jsc</a></li> -+ <li class=""><a href="https://jupyter-jsc.fz-juelich.de/nbviewer/github/kreuzert/Jupyter-JSC/blob/master/FAQ.ipynb">HDFCloud FAQ</a></li> -+ <li class=""><a href="https://jupyter-jsc.fz-juelich.de/static/files/projects.html">Link Projects to Home</a></li> -+ <li class=""><a href="https://jupyter-jsc.fz-juelich.de/static/files/kernel.html">Setup your own kernel</a></li> -+ <li class=""><a target="_blank" href="https://www.unicore.eu/about-unicore/case-studies/jupyter-at-jsc/">jupyter-jsc at unicore.eu</a></li> -+ </ul> -+ </div> -+ </li> -+ <li id="navbarbtn-links-2" class="menu-btn"><a>Jupyter</a> -+ <div id="navbarmenu-links-2" class="menu-box menu-sub-box show-sub-header" style=""> -+ <ul> -+ <li class=""><a target="_blank" href="https://www-jupyter.org/">Home</a></li> -+ <li class=""><a target="_blank" href="https://newsletter.jupyter.org/">Newsletter</a></li> -+ <li class=""><a target="_blank" href="https://www.youtube.com/watch?v=HW29067qVWk">Introduction Video</a></li> -+ <li class=""><a target="_blank" href="https://blog.jupyter.org/">Blog</a></li> -+ <li class=""><a target="_blank" href="https://jupyter.org/documentation.html">Documentation</a></li> -+ <li class=""><a target="_blank" href="https://www.oreilly.com/topics/jupyter">O'Reilly on Jupyter</a></li> -+ <li class=""><a target="_blank" 
href="https://twitter.com/projectjupyter">Twitter</a></li> -+ <li class=""><a target="_blank" href="https://github.com/trending/jupyter-notebook">Jupyter-Notebooks</a></li> -+ </ul> -+ </div> -+ </li> -+ <li id="navbarbtn-links-3" class="menu-btn"><a>JSC</a> -+ <div id="navbarmenu-links-3" class="menu-box menu-sub-box show-sub-header" style=""> -+ <ul> -+ <li class=""><a target="_blank" href="https://www.fz-juelich.de/ias/jsc/EN/Expertise/Supercomputers/JUWELS/JUWELS_node.html">JUWELS</a></li> -+ <li class=""><a target="_blank" href="https://www.fz-juelich.de/ias/jsc/EN/Expertise/Supercomputers/JURECA/JURECA_node.html">JURECA</a></li> -+ <li class=""><a target="_blank" href="https://hbp-hpc-platform.fz-juelich.de/?page_id=1073">JURON</a></li> -+ <li class=""><a target="_blank" href="https://www.fz-juelich.de/ias/jsc/EN/News/Newsletter/newsletter_node.html">Newsletter</a></li> -+ <li class=""><a target="_blank" href="https://www.fz-juelich.de/ias/jsc/EN/News/Events/events_node.html">Events</a></li> -+ <li class=""><a target="_blank" href="https://twitter.com/fzj_jsc">Twitter</a></li> -+ </ul> -+ </div> -+ </li> -+ </ul> -+ </div> -+ </li> -+ -+ </ul> -+ </div> -+ </div> -+ </nav> -+ </div> -+ </div> -+ -+<div id="body"> -+<div class="background-wrapper"> -+ <div class="content" id="JupyterLabs-div"> -+ -+ <!--<center><h2 style="color:red">jupyter-jsc maintenance: 25-02-2020 - 26-02-2020</h2></center>--> -+ <h2> -+ The access token of your browser session to the running JupyterLab has expired. -+ </h2> -+ <p> -+ Unfortunately you have to log out and log in again from the Jupyter-JSC to regain access permission.<br> -+ <a href="https://jupyter-jsc.fz-juelich.de/hub/logout?stopall=false&alldevices=false"> Logout now </a> -+ </p> -+ -+ </div> -+</div> -+</div> -+ -+<div class="footer"> -+ <div class="footer-top-background"> -+ </div> -+ <div class="footer-bottom-background"> -+ <div class="footer-bottom"> -+ <div class="footer-links"> -+ <span>© Forschungszentrum Jülich</span> -+ <a href="https://jupyter-jsc.fz-juelich.de/hub/imprint">Imprint</a> -+ <a href="https://jupyter-jsc.fz-juelich.de/hub/privacy">Privacy Policy</a> -+ <a href="mailto:ds-support@fz-juelich.de?subject=jupyter-jsc Support&body=Please describe your problem here. (english or german)">Support</a> -+ <a href="https://jupyter-jsc.fz-juelich.de/hub/terms">Terms of Service</a> -+ </div> -+ <a href="https://www.helmholtz.de/en/" target="_blank"><img class="helmholtz-logo" src="/hub/static/images/helmholtz.png"></a> -+ </div> -+ </div> -+</div> -+ -+</div> <!-- container --> -+ -+</body></html> diff --git a/Golden_Repo/j/Jupyter/Jupyter-2020.2.5-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/j/Jupyter/Jupyter-2020.2.5-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index b5b826ec34b3b8385af35bf77c3ea82903fc14c2..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/Jupyter-2020.2.5-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,1317 +0,0 @@ -easyblock = 'Bundle' - -name = 'Jupyter' -version = '2020.2.5' -versionsuffix = '-Python-%(pyver)s' - -local_jlab_version = '2.2.9' - -homepage = 'http://www.jupyter.org' -description = """ -Project Jupyter exists to develop open-source software, open-standards, and services for interactive computing across -dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('unzip', '6.0'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('libyaml', '0.2.5'), - ('Pandoc', '2.11.0.4', '', SYSTEM), # For doc-generation - ('texlive', '20200406'), - ('ITK', '5.1.2', '-nompi' + versionsuffix), - ('HDF5', '1.10.6', '-serial'), - ('netcdf4-python', '1.5.4', '-serial' + versionsuffix), - ('FFmpeg', '4.3.1'), # for pydub - ('LLVM', '10.0.1'), # llvmlite - ('git', '2.28.0'), # for jupyterlab-git (req. >=2.0) - ('SciPy-Stack', '2020', versionsuffix), -] - -osdependencies = [('openssl'), ('git')] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'filter': ('python -c "import %(ext_name)s"', ''), - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': True, - 'use_pip_for_deps': False, -} - -components = [ - ('nodejs', '14.15.3', { - 'easyblock': 'ConfigureMake', - 'source_urls': ['http://nodejs.org/dist/v%(version)s/'], - 'sources': ['node-v%(version)s.tar.gz'], - 'start_dir': 'node-v%(version)s', - }), -] - -exts_list = [ - ('distro', '1.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92')]), - ])), - ('scikit-build', '0.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'da40dfd69b2456fad1349a894b90180b43712152b8a85d2a00f4ae2ce8ac9a5c')]), - ('modulename', 'skbuild'), - ])), - ('ptvsd', '4.3.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'ptvsd-4.3.2.zip'), - ('checksums', [('sha256', '3b05c06018fdbce5943c50fb0baac695b5c11326f9e21a5266c854306bda28ab')]), - ])), - # ('cryptography', '2.8', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651')]), - # ])), # part of Python module - ('pyOpenSSL', '19.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507')]), - ('modulename', 'OpenSSL'), - ])), - ('entrypoints', '0.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451')]), - ('use_pip', False), - ])), - ('async_generator', '1.10', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144')]), - ])), - ('nest_asyncio', '1.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa')]), - ])), - ('absl-py', '0.8.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd9129186431e150d7fe455f1cb1ecbb92bb5dba9da9bc3ef7b012d98c4db2526')]), - ('modulename', 'absl'), - ])), - ('websockify', '0.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c35b5b79ebc517d3b784dacfb993be413a93cda5222c6f382443ce29c1a6cada')]), - ])), - ('typing_extensions', '3.7.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae')]), - ])), - # General Python packages - ('tornado', '6.0.3', 
dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c845db36ba616912074c5b1ee897f8e0124df269468f25e4fe21fe72f6edd7a9')]), - ])), - ('bokeh', '2.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd9248bdb0156797abf6d04b5eac581dcb121f5d1db7acbc13282b0609314893a')]), - ])), - ('seaborn', '0.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '76c83f794ca320fb6b23a7c6192d5e185a5fcf4758966a0c0a54baee46d41e2f')]), - ])), - ('nbformat', '5.0.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f545b22138865bfbcc6b1ffe89ed5a2b8e2dc5d4fe876f2ca60d8e6f702a30f8')]), - ])), - ('param', '1.9.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8370d41616e257b8ed2e242ec531e0340b8c954bea414b791fa0ef6235959981')]), - ])), - # Jupyter-core and dependencies - ('alabaster', '0.7.12', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02')]), - ])), - ('Babel', '2.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28')]), - ('modulename', 'babel'), - ])), - ('snowballstemmer', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52')]), - ])), - ('docutils', '0.15.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99')]), - ])), - ('imagesize', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5')]), - ])), - ('sphinxcontrib-websupport', '1.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1501befb0fdf1d1c29a800fdbf4ef5dc5369377300ddbdd16d2cd40e54c6eefc')]), - ('modulename', 'sphinxcontrib'), - ])), - ('Sphinx', '1.8.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c7658aab75c920288a8cf6f09f244c6cfdae30d82d803ac1634d9f223a80ca08')]), - ('modulename', 'sphinx'), - ])), - # ('pexpect', '4.7.0', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '9e2c1fd0e6ee3a49b28f95d4b33bc389c89b20af6a1255906e90ff1262ce62eb')]), - # ])), # part of Python module, but in version 4.6.0 (sanity check fails if package used from Python dependency) - ('ipython', '7.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dfd303b270b7b5232b3d08bd30ec6fd685d8a58cabd54055e3d69d8f029f7280')]), - ('modulename', 'IPython'), - ])), # part of Python module, but in version 7.4.0 (sanity check fails if package used from Python dependency) - ('ipynb', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8d834c777ca3885289938728cc382f081c86a58e92961e86f0aba60c96938ce5')]), - ])), - ('jupyter_core', '4.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'aa1f9496ab3abe72da4efe0daab0cb2233997914581f9a071e07498c6add8ed3')]), - ])), - ('retrying', '1.3.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '08c039560a6da2fe4f2c426d0766e284d3b736e355f8dd24b37367b0bb41973b')]), - ])), - ('plotly', '4.12.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9ec2c9f4cceac7c595ebb77c98cbeb6566a97bef777508584d9bb7d9bcb8854c')]), - ])), - ('tikzplotlib', '0.8.4', dict(list(local_common_opts.items()) + [ # renamed to matplotlib2tikz with version 0.8.0 
- ('checksums', [('sha256', '284e70915fc6994472abd2fa47af947e7606085e9957898fc645f0dd9b44da8c')]), - ])), - # Jupyter client - ('jupyter_client', '6.1.7', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '49e390b36fe4b4226724704ea28d9fb903f1a3601b6882ce3105221cd09377a1')]), - ])), - ('pynvml', '8.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c8d4eadc648c7e12a3c9182a9750afd8481b76412f83747bcc01e2aa829cde5d')]), - ])), - # Jupyter notebook and dependencies - # ('traitlets', '4.3.3', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', 'd023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7')]), - # ])), # part of Python module (sanity check fails if package used from Python dependency) - ('pyzmq', '18.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '93f44739db69234c013a16990e43db1aa0af3cf5a4b8b377d028ff24515fbeb3')]), - ('modulename', 'zmq'), - ])), - ('singledispatch', '3.4.0.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c')]), - ])), - ('ipyparallel', '6.2.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '76c7b028962b0ba762e4e45b450ee3a4353e7221526a8af812e817d7ef6ac065')]), - ])), - ('ipykernel', '5.1.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b368ad13edb71fa2db367a01e755a925d7f75ed5e09fbd3f06c85e7a8ef108a8')]), - ])), - ('terminado', '0.8.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4804a774f802306a7d9af7322193c5390f1da0abb429e082a10ef1d46e6fb2c2')]), - ('use_pip', False), - ])), - ('bleach', '3.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'aa8b870d0f46965bac2c073a93444636b0e1ca74e9777e34f03dd494b8a59d48')]), - ])), - ('mistune', '0.8.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e')]), - ])), - ('pandocfilters', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9')]), - ])), - ('testpath', '0.4.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e')]), - ('use_pip', False), - ])), - ('nbconvert', '6.0.7', dict(list(local_common_opts.items()) + [ # convert Jupyter notebooks to: HTML, Latex, etc. - # !!! nbconvert will try to read from all paths in <jupyter-config-path> the file nbconvert/templates/conf.json - # ensure it has permissions (https://github.com/jupyter/nbconvert/issues/1430) - ('checksums', [('sha256', 'cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002')]), - # 6.0.7 - patch for jupyter_contrib_nbextensions needed: - # https://github.com/ipython-contrib/jupyter_contrib_nbextensions/pull/1532 - ])), - # ('ipython_genutils', '0.2.0', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', 'eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8')]), - # ])), # part of Python module (sanity check fails if package used from Python dependency) - ('Send2Trash', '1.5.0', dict(list(local_common_opts.items()) + [ # req. by widgetsnbextension - ('checksums', [('sha256', '60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2')]), - ('modulename', 'send2trash'), - ])), - ('argon2-cffi', '20.1.0', dict(list(local_common_opts.items()) + [ # req. 
for notebook >= 6.1 - ('checksums', [('sha256', 'd8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d')]), - ('modulename', 'argon2'), - ])), - ('notebook', '6.1.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3db37ae834c5f3b6378381229d0e5dfcbfb558d08c8ce646b1ad355147f5e91d')]), - ('patches', ['notebook-6.0.3_jsc.patch']), # allow others to read/write in .ipynb_checkpoints - ])), - ('version_information', '1.0.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '811b9cea4b376aa62a77dc729a937ce8e2844573b8686b5c1840147054fb938d')]), - ])), - ('lesscpy', '0.13.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f3c6d0b544c5bcdadcd3d8319feccb4128d06676d4117c6c9396ab39c25372ad')]), - ])), - ('prometheus_client', '0.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da')]), - ])), - ('jupyterthemes', '0.20.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2a8ebc0c84b212ab99b9f1757fc0582a3f53930d3a75b2492d91a7c8b36ab41e')]), - ])), - # Jupyter Lab and dependencies - ('zipp', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e')]), - ])), - ('jupyter-packaging', '0.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd89134d2df88c30098bce0f8d8fb07b988ef0f616775dbd4b82dac9562b5cae6')]), - ])), - ('importlib_metadata', '0.23', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26')]), - ])), - ('jsonschema', '3.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2fa0684276b6333ff3c0b1b27081f4b2305f0a36cf702a23db50edb141893c3f')]), - ])), # part of Python module in version 3.0.1 (sanity check fails if package used from Python dependency) - ('jupyterlab_launcher', '0.13.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f880eada0b8b1f524d5951dc6fcae0d13b169897fc8a247d75fb5beadd69c5f0')]), - ])), - ('sphinx_rtd_theme', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a')]), - ])), - ('future', '0.18.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '858e38522e8fd0d3ce8f0c1feaf0603358e366d5403209674c7b617fa0c24093')]), - ])), - ('commonmark', '0.9.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60')]), - ])), - ('recommonmark', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '29cd4faeb6c5268c633634f2d69aef9431e0f4d347f90659fd0aab20e541efeb')]), - ])), - ('jupyterlab', local_jlab_version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3be8f8edea173753dd838c1b6d3bbcb6f5c801121f824a477025c1b6a1d33dc6')]), - ('patches', [('401html.patch', 1)]) - ])), - ('json5', '0.8.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '124b0f0da1ed2ff3bfe3a3e9b8630abd3c650852465cb52c15ef60b8e82a73b0')]), - ])), - ('jupyterlab_server', '1.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5431d9dde96659364b7cc877693d5d21e7b80cea7ae3959ecc2b87518e5f5d8c')]), - ])), - ('jupyter_kernel_gateway', '2.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'6424a9f118e757ef18e9bed7784ca05ad9e633945df328ac4d8810eadc6f6ccd')]), - ('modulename', 'kernel_gateway'), - ])), - ('nbclient', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '01e2d726d16eaf2cde6db74a87e2451453547e8832d142f73f72fddcd4fe0250')]), - ])), - # Jupyter Kernel and dependencies - ('ptyprocess', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0')]), - ('use_pip', False), - ])), - # Jupyter Widgets and dependencies - ('defusedxml', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5')]), - ])), - ('widgetsnbextension', '3.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '079f87d87270bce047512400efd70238820751a11d2d8cb137a5a5bdbaf255c7')]), - ])), - ('ipywidgets', '7.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e945f6e02854a74994c596d9db83444a1850c01648f1574adf144fbbabe05c97')]), - ])), - # ('ipyscales', '0.5.0', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '')]), - # ])), - ('ipydatawidgets', '4.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd9f94828c11e3b40350fb14a02e027f42670a7c372bcb30db18d552dcfab7c01')]), - ])), - ('traittypes', '0.2.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'be6fa26294733e7489822ded4ae25da5b4824a8a7a0e0c2dccfde596e3489bd6')]), - ])), - ('bqplot', '0.12.17', dict(list(local_common_opts.items()) + [ # 2-D plotting with d3.js - ('checksums', [('sha256', 'f103d82e7c05ec29e2c4c2357cb207acb99cf5fedbad55f26e04d8bbdb3248ad')]), - ])), - ('jupyter_bokeh', '2.0.2', dict(list(local_common_opts.items()) + [ # ipywidget for bokeh - ('checksums', [('sha256', '8dc63198833e478c3231ba5a1c5492bac859f875b1dc4e8190ce308276aa01fc')]), - ])), - ('pythreejs', '2.2.0', dict(list(local_common_opts.items()) + [ # 3-D scene visualization with three.js - ('checksums', [('sha256', 'c05f52932efd58ff18beb333a6b2bb80341e980718a313b74821c4a5c8640721')]), - ])), - ('PyWavelets', '1.1.1', dict(list(local_common_opts.items()) + [ # for a nice threejs example notebook - ('checksums', [('sha256', '1a64b40f6acb4ffbaccce0545d7fc641744f95351f62e4c6aaa40549326008c9')]), - ('modulename', 'pywt'), - ])), - ('imageio', '2.6.1', dict(list(local_common_opts.items()) + [ # for a nice threejs example notebook - ('checksums', [('sha256', 'f44eb231b9df485874f2ffd22dfd0c3c711e7de076516b9374edea5c65bc67ae')]), - ])), - ('networkx', '2.3', dict(list(local_common_opts.items()) + [ # for a nice threejs example notebook - ('source_tmpl', 'networkx-2.3.zip'), - ('checksums', [('sha256', '8311ddef63cf5c5c5e7c1d0212dd141d9a1fe3f474915281b73597ed5f1d4e3d')]), - ])), - ('scikit-image', '0.16.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dd7fbd32da74d4e9967dc15845f731f16e7966cee61f5dc0e12e2abb1305068c')]), - ('modulename', 'skimage'), - ])), - ('ipywebrtc', '0.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4db2be7d0bfcbd142b2f9f9e8303b926832a632ed4a3bc5681b319a5f226285a')]), - ])), - ('ipyvolume', '0.6.0a6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1a71c681dd39b514db966c4812bbbd1347ce082ee7a7bcc53f494e0546bf37ff')]), - ])), - ('branca', '0.3.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3e762c9bdf40725f3d05ea1fda8fae9b470bfada6474e43a1242c8204a7bb15e')]), 
- ])), - ('ipyleaflet', '0.13.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c05f138327926bc81f29a629588bd656be5ff76dd8785c1e7eac5445b1d5a432')]), - ])), - ('ipympl', '0.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c33243047166fbedf3729af116186ae1894ee45db71cbc6632bf057a913ae010')]), - ])), - # ('PyYAML', '5.1.2', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4')]), - # ('modulename', 'yaml'), - # ])), # part of Python module (sanity check fails if package used from Python dependency) - # Jupyter Notebook Extensions - ('jupyter_nbextensions_configurator', '0.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e5e86b5d9d898e1ffb30ebb08e4ad8696999f798fef3ff3262d7b999076e4e83')]), - ])), - ('jupyter_latex_envs', '1.4.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '070a31eb2dc488bba983915879a7c2939247bf5c3b669b398bdb36a9b5343872')]), - ('patches', ['jupyter_latex_envs-template_paths.patch']), - # support for nbconvert>=6.x -> https://github.com/jfbercher/jupyter_latex_envs/pull/58 - ('modulename', 'latex_envs'), - ])), - ('jupyter_highlight_selected_word', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9fa740424859a807950ca08d2bfd28a35154cd32dd6d50ac4e0950022adc0e7b')]), - ])), - ('prompt_toolkit', '2.0.10', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f15af68f66e664eaa559d4ac8a928111eebd5feda0c11738b5998045224829db')]), - ])), - ('jupyter_contrib_core', '0.3.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e65bc0e932ff31801003cef160a4665f2812efe26a53801925a634735e9a5794')]), - ])), - ('jupyter_contrib_nbextensions', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'eecd28ecc2fc410226c0a3d4932ed2fac4860ccf8d9e9b1b29548835a35b22ab')]), - ('patches', ['jupyter_contrib_nbextensions-template_paths.patch']), - # support for nbconvert>=6.x -> https://github.com/ipython-contrib/jupyter_contrib_nbextensions/pull/1532 - ])), - ('rise', '5.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2be45c3dbe27e63aae047c6a90be19798b6e17e9b6b0e25408b3c4f645658e26')]), - ])), - ('idna-ssl', '1.1.0', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', 'a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c')]), - ])), - ('multidict', '4.7.5', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', 'aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e')]), - ])), - ('yarl', '1.4.2', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', '58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b')]), - ])), - ('async-timeout', '3.0.1', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', '0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f')]), - ])), - ('aiohttp', '3.6.2', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', '259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326')]), - ])), - ('simpervisor', '0.3', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', 
'd82e4527ae326747551e4bdfa632ff4ebef275ce721f80886c747adfdbf41c2e')]), - ])), - ('jupyter_server', '1.0.7', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b0b3185975b32a03520aa577d90145bc288a3a12124e0e80306ec6216c9b8634')]), - ])), - ('jupyter-server-proxy', '1.5.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://github.com/jupyterhub/jupyter-server-proxy/archive/']), - ('source_tmpl', 'v%(version)s.tar.gz'), - ('checksums', [('sha256', '140bd642c511519ddd2acc5f70e0b46a40bbc673c888dcb3b19981005286853b')]), - ('patches', ['jupyterserverproxy-urlfile.patch']), - ])), - # Jupyter Lab Extensions - ('jupyterlab_github', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1f560a91711b779d08118161af044caff44159e315cb80ae830d3dfbded7bac9')]), - # do not use pypi for download -> we need to patch drive.json - ('source_urls', ['https://github.com/jupyterlab/jupyterlab-github/archive']), - ('source_tmpl', 'v%(version)s.tar.gz'), - ('patches', ['jupyterlab_github-%(version)s_jsc.patch']), - ])), - ('jupyterlab-gitlab', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '31d3fbd9b139aca7704dd13967ac22c81e372b50c4db730f342ab7800db7a5c6')]), - # do not use pypi for download -> we need to patch drive.json - ('source_urls', ['https://gitlab.com/beenje/jupyterlab-gitlab/-/archive/%(version)s']), - ('source_tmpl', 'jupyterlab-gitlab-%(version)s.tar.gz'), - ('patches', ['jupyterlab-gitlab-%(version)s_jsc.patch']), - ])), - ('jupyterlab-quickopen', '0.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('90cba3a7e6ff8d2b3eaf2594079c25f82e2c158d2ec63ebd951e4042a7445f8e')]), - ])), - ('zstandard', '0.12.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a110fb3ad1db344fbb563942d314ec5f0f3bdfd6753ec6331dded03ad6c2affb')]), - ])), - - # https://blog.kitware.com/itk-is-on-pypi-pip-install-itk-is-here/ - ('itk_core', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_core-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '1f0254f0f134b709e6df3601dc38ccc45c2d5d5576fc10e1a0313d1fe8aefa84')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_filtering', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_filtering-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'f4a1256c8c684329780b9f4326fb571023af1d96fbda7cb7b513b4a395a9cd42')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_segmentation', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_segmentation-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'f81ba4881f7802eb5b911c4e1eac4706220647196ebda68477318630542db226')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_numerics', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_numerics-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '6ac84a6386cd51692ed9605b6daefcc6230ec976f93161c9c5d89cfdfe042eba')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_registration', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_registration-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'd25b578ffb859f07cbad6987d1adb507e357d91f82863faeae337d8645b9d29d')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_io', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 
'itk_io-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '6ecc528ce4ae42b6d14c972d51055856f130a13474463ba4bf187ed2afdb04b1')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_meshtopolydata', '0.6.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_meshtopolydata-0.6.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '486e418808c27ccc76619737219fa57fcd1de2da5fcd0325ee8c4395965deb39')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('pyct', '0.4.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'df7b2d29f874cabdbc22e4f8cba2ceb895c48aa33da4e0fe679e89873e0a4c6e')]), - ])), - ('colorcet', '2.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '514813790a74b578c3eaff76b2102274c2ba8b0239c9504586df685223007dee')]), - ])), - ('itkwidgets', '0.32.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '11ee93007cf354405e01bf9a28fb1eb2fb0a5d139e1cdded1a07b47f7ee76972')]), - ])), - ('ujson', '3.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'abb1996ba1c1d2faf5b1e38efa97da7f64e5373a31f705b96fe0587f5f778db4')]), - ])), - # ('jupyterlab_iframe', '0.2.2', dict(list(local_common_opts.items()) + [ # open HTML URLs in Jupyter-Tabs - # ('checksums', [('sha256', '7c26cddc5f29f0b5ac6ba4707ce811f9787282549d22098b691bcdcc6692238c')]), - # ('use_pip', False), - # ])), - ('python-dotenv', '0.10.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f157d71d5fec9d4bd5f51c82746b6344dffa680ee85217c123f4a0c8117c4544')]), - ('modulename', 'dotenv'), - ])), - # ('ipyauth', '0.2.5', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '189db0e9fd48cf52f333ef8ac1a71b9e3a9eec6561f5085e3d7239ca2588ba87')]), - # ])), - ('jupyterlab_latex', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '722558144e6ce71abe4fba0fb01c13c37271fa2497881bfebe9b6422d1fc795b')]), - ])), - # ('jupyterlab_slurm', '1.0.5', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', 'f7c6a54efe529cc47194cb4a7cbf1225fc0b0668d4dfe118405b68bde805c061')]), - # ])), - ('jupyterlmod', '2.0.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://github.com/cmd-ntrf/jupyter-lmod/archive/']), - ('source_tmpl', '%(version)s.tar.gz'), - ('checksums', [('sha256', 'f547432afb30cb87c8dabf78fd4cc4c35ff4b6d2aafbae19c249b7d596015c6a')]), - ('patches', [ - 'jupyterlmod-urlfile.patch', - 'jupyterlmod-packagejson.patch', - ]), - ])), - ('nbresuse', '0.3.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5ba19dd0b08ff19470aeb34cda2f07276a3fb1cd750bc53f3a3e06322664e98e')]), - ])), - ('colorama', '0.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d')]), - ])), - ('nbdime', '2.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4e3efdcfda31c3074cb565cd8e76e2e5421b1c4560c3a00c56f8679dd15590e5')]), - ])), - ('smmap2', '2.0.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '29a9ffa0497e7f2be94ca0ed1ca1aa3cd4cf25a1f6b4f5f87f74b46ed91d609a')]), - ('modulename', 'smmap'), - ])), - ('gitdb2', '2.0.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1b6df1433567a51a4a9c1a5a0de977aa351a405cc56d7d35f3388bad1f630350')]), - ('modulename', 'gitdb'), - ])), - ('GitPython', '3.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', 
[('sha256', '3237caca1139d0a7aa072f6735f5fd2520de52195e0fa1d8b83a9b212a2498b2')]), - ('modulename', 'git'), - ])), - ('jupyterlab_git', '0.23.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3c709c33df0b838e50f76fa2e7e0302bd3c32ec24e161ee0e8f436a3844e8b16')]), - ])), - ('sidecar', '0.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3e79de269ddd2c0eff1cb26025cb9f434cf9914b777cf03eeab8347f6de7160e')]), - ])), - ('pycodestyle', '2.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e')]), - ])), - ('autopep8', '1.4.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4d8eec30cc81bc5617dbf1218201d770dc35629363547f17577c61683ccfb3ee')]), - ])), - ('yapf', '0.28.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6f94b6a176a7c114cfa6bad86d40f259bbe0f10cf2fa7f2f4b3596fc5802a41b')]), - ])), - ('isort', '4.3.21', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1')]), - ])), - ('toml', '0.10.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c')]), - ])), - ('pathspec', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e285ccc8b0785beadd4c18e5708b12bb8fcf529a1e61215b3feff1d1e559ea5c')]), - ])), - ('typed_ast', '1.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34')]), - ])), - ('regex', '2019.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '720e34a539a76a1fedcebe4397290604cc2bdf6f81eca44adb9fb2ea071c0c69')]), - ])), - ('black', '19.3b0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c')]), - ])), - ('jupyterlab_code_formatter', '1.3.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7c415caff528e55027e82c96b60dcab0537aa5cda168997f03a4964c51e2b7b5')]), - ])), - ############### - # extras - ('mccabe', '0.6.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f')]), - ])), - ('pyflakes', '2.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8')]), - ])), - ('flake8', '3.8.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208')]), - ])), - ('pydocstyle', '5.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5')]), - ])), - ('rope', '0.16.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd2830142c2e046f5fc26a022fe680675b6f48f81c7fc1f03a950706e746e9dfe')]), - ])), - # base for python language server - ('parso', '0.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '908e9fae2144a076d72ae4e25539143d40b8e3eafbaeae03c1bfe226f4cdf12c')]), - ])), - ('jedi', '0.17.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20')]), - ])), - ('python-jsonrpc-server', '0.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'62c543e541f101ec5b57dc654efc212d2c2e3ea47ff6f54b2e7dcb36ecf20595')]), - ('modulename', 'pyls_jsonrpc'), - ])), - ('pluggy', '0.13.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0')]), - ])), - # test - ('versioneer', '0.18', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ead1f78168150011189521b479d3a0dd2f55c94f5b07747b484fd693c3fbf335')]), - ])), - ('lazy-object-proxy', '1.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0')]), - ])), - ('wrapt', '1.11.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1')]), - ])), - ('astroid', '2.3.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '71ea07f44df9568a75d0f354c49143a4575d90645e9fead6dfb52c26a85ed13a')]), - ])), - ('wrapt', '1.11.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1')]), - ])), - ('lazy-object-proxy', '1.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0')]), - ])), - ('typed_ast', '1.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34')]), - ])), - ('six', '1.12.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73')]), - ])), - ('pylint', '2.4.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3db5468ad013380e987410a8d6956226963aed94ecb5f9d3a28acca6d9ac36cd')]), - ])), - ('pytest', '5.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '84dde37075b8805f3d1f392cc47e38a0e59518fb46a431cfdaf7cf1ce805f970')]), - ])), - ('mock', '4.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dd33eb70232b6118298d516bbcecd26704689c386594f0f3c4f13867b2c56f72')]), - ])), - ('pytest-cov', '2.8.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b')]), - ])), - ('pytest-xprocess', '0.13.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '779aeca517cd9c996d1544bdc510cb3cff40c48136d94bbce6148e27f30a93ff')]), - ])), - ('coverage', '5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052')]), - ])), - # python language server - ('python-language-server', '0.36.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9984c84a67ee2c5102c8e703215f407fcfa5e62b0ae86c9572d0ada8c4b417b0')]), - ('modulename', 'pyls'), - ])), - ('jupyter-lsp', '0.9.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '73b5cb47fbe672bee6c02ddc4f2fbe728dc3bff589b5741835ebb80a862163af')]), - ])), - - #################### - # Jupyter Hub - # ('SQLAlchemy', '1.3.10', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '0f0768b5db594517e1f5e1572c73d14cf295140756431270d89496dc13d5e46c')]), - # ])), # part of Python module in version 1.3.1 - # ('python-oauth2', '1.1.1', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', 'd7a8544927ac18215ba5317edd8f640a5f1f0593921bcf3ce862178312c8c9a4')]), - # 
('modulename', 'oauth2'), - # ])), # part of Python module - ('pamela', '1.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '65c9389bef7d1bb0b168813b6be21964df32016923aac7515bdf05366acbab6c')]), - ])), - # ('alembic', '1.2.1', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '9f907d7e8b286a1cfb22db9084f9ce4fde7ad7956bb496dc7c952e10ac90e36a')]), - # ])), # part of Python module in version 1.0.8 - ('certipy', '0.1.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '695704b7716b033375c9a1324d0d30f27110a28895c40151a90ec07ff1032859')]), - ])), - ('oauthlib', '3.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889')]), - ])), - ('ruamel.yaml', '0.16.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '412a6f5cfdc0525dee6a27c08f5415c7fd832a7afcb7a0ed7319628aed23d408')]), - ])), - ('ruamel.yaml.clib', '0.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cee86ecc893a6a8ecaa7c6a9c2d06f75f614176210d78a5f155f8e78d6989509')]), - ('modulename', 'ruamel.yaml'), - ])), - ('python-json-logger', '0.1.11', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b7a31162f2a01965a5efb94453ce69230ed208468b0bbc7fdfc56e6d8df2e281')]), - ('modulename', 'pythonjsonlogger'), - ])), - ('Jinja2', '2.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250')]), - ])), - ('jupyter_telemetry', '0.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '445c613ae3df70d255fe3de202f936bba8b77b4055c43207edf22468ac875314')]), - ])), - ('jupyterhub', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '852a70225a03abd631b36a207f3ffdf69326a0db4cef539825fde39ec1b713d7')]), - ('patches', ['jupyterhub-1.1.0_logoutcookie-2.0.patch']), - # copy 401.html -> <jupyter-install-dir>/share/jupyter/lab/static/ - ])), - ('appmode', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'deac99adaa95e28fa8a56c072be653603cffa49dc06469701ac9c014b690e7c4')]), - ])), - ('HeapDict', '1.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8495f57b3e03d8e46d5f1b2cc62ca881aca392fd5cc048dc0aa2e1a6d23ecdb6')]), - ])), - ('zict', '1.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e34dd25ea97def518fb4c77f2c27078f3a7d6c965b0a3ac8fe5bdb0a8011a310')]), - ])), - ('tblib', '1.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1735ff8fd6217446384b5afabead3b142cf1a52d242cfe6cab4240029d6d131a')]), - ])), - ('sortedcontainers', '2.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a')]), - ])), - ('msgpack', '0.6.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ea3c2f859346fcd55fc46e96885301d9c2f7a36d453f5d8f2967840efa1e1830')]), - ])), - ('dask', '2.30.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a1669022e25de99b227c3d83da4801f032415962dac431099bf0534648e41a54')]), - ])), - ('cloudpickle', '1.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0b6258a20a143603d53b037a20983016d4e978f554ec4f36b3d0895b947099ae')]), - ])), - ('distributed', '2.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'30b0ca195ace1e39bdd278bf1ad257f7674b3e2b8e7a2a37ce7e2ade4aecccf3')]), - ])), - ('dask-jobqueue', '0.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd32ddf3e3c7db29ace102037fa5f61c8db2d945176454dc316a6ffdb8bbfe88b')]), - ])), - ('dask_labextension', '3.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c613f5c76b8fce4fae167eeab3377e0706e5045a27da1200b3b173025a94d94b')]), - ])), - ('Automat', '0.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '269a09dfb063a3b078983f4976d83f0a0d3e6e7aaf8e27d8df1095e09dc4a484')]), - ])), - ('PyHamcrest', '1.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8ffaa0a53da57e89de14ced7185ac746227a8894dbd5a3c718bf05ddbd1d56cd')]), - ('modulename', 'hamcrest'), - ])), - ('pyasn1', '0.4.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba')]), - ])), - ('pyasn1-modules', '0.2.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e')]), - ])), - ('service_identity', '18.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0858a54aabc5b459d1aafa8a518ed2081a285087f349fe3e55197989232e2e2d')]), - ])), - ('Twisted', '19.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd5db93026568f60cacdc0615fcd21d46f694a6bfad0ef3ff53cde2b4bb85a39d')]), - ('source_tmpl', '%(name)s-%(version)s.tar.bz2'), - ])), - ('autobahn', '19.10.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '734385b00547448b3f30a752cbfd2900d15924d77dc4a1699b8bce1ea8899f39')]), - ])), - ('constantly', '15.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35')]), - ])), - ('hyperlink', '19.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654')]), - ])), - ('incremental', '17.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3')]), - ])), - ('txaio', '18.8.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '67e360ac73b12c52058219bb5f8b3ed4105d2636707a36a7cdafb56fe06db7fe')]), - ])), - ('zope.interface', '4.7.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'fd1101bd3fcb4f4cf3485bb20d6cb0b56909b94d3bd2a53a6cb9d381c3da3365')]), - ])), - # ('backcall', '0.1.0', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4')]), - # ])), # part of Python module (sanity check fails if package used from Python dependency) - ('wslink', '0.1.13', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6983114e978717a2681815b1ef4b8a0fa2c80c89c6ed09b9554a36c5869cf935')]), - ])), - ('jupyterlab_pygments', '0.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '31deda75bd11b014190764c79f6199aa04ef2d4cf35c1c94270fc2e19c23a5c5')]), - ])), - # ('jupyterlab-nvdashboard', '0.2.1', dict(list(local_common_opts.items()) + [ - # # throughs too many errors if nvtools or driver are not installed - # ('checksums', [('sha256', '2f69a1c3b35250cda252fe6bc684c0e99c3eac87e012843d7fc52f204ab3d0c6')]), - # ])), - ('ipyvue', '1.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', 
[('sha256', '5b59cf92a1eb7fbef4f2d02be49ac562a721a6cf34f991ac963222cf4c8885a1')]), - ])), - ('ipyvuetify', '1.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '593f5d6761e304cbb78bca967030485d8835e8d310ef7d83ac1b0e6a13e4911c')]), - ])), - ('voila', '0.2.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '332725b88fb026ad421db90fdad53d7e8de2ffd03a8584a55d9940465932ede8')]), - ])), - ('voila-material', '0.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0827a27f0f23ca87bd8f565c4c227c754516d2a120ffce0f7ab1ee12fdec959f')]), - ('modulename', 'voila'), # fake module name to trick the sanity check - ])), - ('voila-gridstack', '0.0.11', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '851dafe1912199ba36ad5544fb0f624bcb1421d2d31c0f1d03fcdcbf6d626dee')]), - ('modulename', 'voila'), - ])), - ('voila-vuetify', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '522f4ec91eb3912a21215b6b04ad58070d1675809d7d4289ca8e42b577fde100')]), - ('modulename', 'voila'), - ])), - ('pydicom', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9488495a9f3e202d1219f7cb165aee0de31efd512d938c0226d34147ce860391')]), - ])), - ('dicom_upload', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd03f309bbae2094d3db75ffaa9753cca5982d2096ec55720a1f54343cc4a6877')]), - ])), - ('jsfileupload', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '245cd74a3c2ed4356df9a33d0072d8ab295b60b6fdfd69c6795396d455fc8a77')]), - ])), - ('pvlink', '0.3.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a2d5f2c204e0e779a5b865742019b4646b8592d76de87cac724dc84f64eaf80f')]), - ])), - - ('textwrap3', '0.9.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'textwrap3-0.9.2.zip'), - ('checksums', [('sha256', '5008eeebdb236f6303dcd68f18b856d355f6197511d952ba74bc75e40e0c3414')]), - ('use_pip', True), - ])), - ('ansiwrap', '0.8.4', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'ansiwrap-0.8.4.zip'), - ('checksums', [('sha256', 'ca0c740734cde59bf919f8ff2c386f74f9a369818cdc60efe94893d01ea8d9b7')]), - ])), - ('backports.weakref', '1.0.post1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2')]), - ])), - ('backports.tempfile', '1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1c648c452e8770d759bdc5a5e2431209be70d25484e1be24876cf2168722c762')]), - ])), - ('tqdm', '4.41.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '166a82cdea964ae45528e0cc89436255ff2be73dc848bdf239f13c501cae5dc7')]), - ])), - ('tenacity', '6.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '72f397c2bb1887e048726603f3f629ea16f88cb3e61e4ed3c57e98582b8e3571')]), - ])), - ('papermill', '2.2.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1c452b1c5a9ab52b94c99d8b7705ae7173f6aa88a3d28a5d30cffba48a46f5b6')]), - ])), - ('pyviz_comms', '0.7.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '712df4cca33dda351de754742b24361eee8e4b7c1cfb0e24f50dcb802fa25624')]), - ])), - ('Markdown', '3.2.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1fafe3f1ecabfb514a5285fca634a53c1b32a81cb0feb154264d55bf2ff22c17')]), - ('modulename', 'markdown'), - ])), - ('panel', '0.9.5', dict(list(local_common_opts.items()) + [ - ('checksums', 
[('sha256', '53340615f30f67f3182793695ebe52bf25e7bbb0751aba6f29763244350d0f42')]), - ])), - ('holoviews', '1.13.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cf2dd426cdc08bf2ca669268ca0432634574429e5b143314ee06ad88c5382802')]), - ])), - - # PythonPackages for Tutorials - ('xarray', '0.16.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '38e8439d6c91bcd5b7c0fca349daf8e0643ac68850c987262d53526e9d7d01e4')]), - ])), - ('patsy', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f115cec4201e1465cd58b9866b0b0e7b941caafec129869057405bfe5b5e3991')]), - ])), - ('statsmodels', '0.10.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9cd2194c6642a8754e85f9a6e6912cdf996bebf6ff715d3cc67f65dadfd37cc9')]), - ])), - ('cftime', '1.0.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1ac64f8f9066ea756ea27d67cedaf064e7c866275218fa7c84684066a5890f70')]), - ])), - ('vega_datasets', '0.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'db8883dab72b6f414e1fafdbf1e8db7543bba6ed77912a4e0c197d74fcfa1c20')]), - ])), - ('Theano', '1.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '35c9bbef56b61ffa299265a42a4e8f8cb5a07b2997dabaef0f8830b397086913')]), - ('modulename', 'theano'), - ])), - ('altair', '3.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9f7c521239ac5a207c3cffc29c5bdde0854fff0dec0b5f91f086ba8e5f1de8a9')]), - ])), - ('cssselect', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f95f8dedd925fd8f54edb3d2dfb44c190d9d18512377d3c1e2388d16126879bc')]), - ])), - ('smopy', '0.0.7', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '578b5bc2502176d210f176ab94e77974f43b32c95cd0768fb817ea2499199592')]), - ])), - ('joblib', '0.14.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0630eea4f5664c463f23fbf5dcfc54a2bc6168902719fa8e19daf033022786c8')]), - ])), - ('scikit-learn', '0.22', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '314abf60c073c48a1e95feaae9f3ca47a2139bd77cebb5b877c23a45c9e03012')]), - ('modulename', 'sklearn'), - ])), - ('memory_profiler', '0.55.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5fa47b274c929dd2cbcd9190afb62fec110701251d2ac2d301caaf545c81afc1')]), - ])), - ('h5py', '2.10.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d')]), - ])), - ('line_profiler', '3.1.0', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '7218ad6bd81f8649b211974bf108933910f016d66b49651effe7bbf63667d141')]), - # ('use_pip', False), # no pip ! 
uses PEP 517 - ('source_tmpl', 'line_profiler-3.1.0-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'a66e089e6d98ab8a70b5f89c0367c6780abad0f0b1d624dbe5edd8f0083986c7')]), - ('unpack_sources', False), # whl package instead of tar.gz because building with skbuild->cmake fails - ])), - ('llvmlite', '0.35.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '80e51d5aa02ad72da9870e89d21f9b152b0220ca551b4596a6c0614bcde336fc')]), - ])), - ('numba', '0.52.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '44661c5bd85e3d3619be0a40eedee34e397e9ccb3d4c458b70e10bf95d1ce933')]), - ])), - ('arviz', '0.10.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dc1f08ce02459989dd4f41fa6012e736cf2ba00ee44b29bebe0a451c58a68e42')]), - ])), - ('pymc3', '3.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1bb2915e4a29877c681ead13932b0b7d276f7f496e9c3f09ba96b977c99caf00')]), - ])), - ('numexpr', '2.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b0d239d9827e1bcee08344fd05835823bc60aff97232e35a928214d03ff802b1')]), - ])), - ('ipythonblocks', '1.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ba923cb7a003bddee755b5a7ac9e046ffc093a04b0bdede8a0a51ef900aed0ba')]), - ])), - ('pydub', '0.23.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c362fa02da1eebd1d08bd47aa9b0102582dff7ca2269dbe9e043d228a0c1ea93')]), - ])), - ('multipledispatch', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a7ab1451fd0bf9b92cab3edbd7b205622fb767aeefb4fb536c2e3de9e0a38bea')]), - ])), - ('partd', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6e258bf0810701407ad1410d63d1a15cfd7b773fd9efe555dac6bb82cc8832b0')]), - ])), - ('locket', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1fee63c1153db602b50154684f5725564e63a0f6d09366a1cb13dffcec179fb4')]), - ])), - ('fsspec', '0.6.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ffd7cd5ac32f36698097c3d78c2c433d4c12f7e4bce3a3a4036fd3491188046d')]), - ])), - ('datashape', '0.5.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2356ea690c3cf003c1468a243a9063144235de45b080b3652de4f3d44e57d783')]), - ])), - ('datashader', '0.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b1f80415f72f92ccb660aaea7b2881ddd35d07254f7c44101709d42e819d6be6')]), - ])), - ('selenium', '3.141.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'deaf32b60ad91a4611b98d8002757f29e6f2c2d5fcaf202e1c9ad06d6772300d')]), - ])), - ('graphviz', '0.13.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'graphviz-0.13.2.zip'), - ('checksums', [('sha256', '60acbeee346e8c14555821eab57dbf68a169e6c10bce40e83c1bf44f63a62a01')]), - ])), - ('vincent', '0.4.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5765bcd360140d2304e52728ad1d4382f3f919ea259a13932828680f2d84fcd3')]), - ])), - ('tailer', '0.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '78d60f23a1b8a2d32f400b3c8c06b01142ac7841b75d8a1efcb33515877ba531')]), - ])), - # Dash - ('Flask', '1.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52')]), - ])), - ('Flask-Compress', '1.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'468693f4ddd11ac6a41bca4eb5f94b071b763256d54136f77957cfee635badb3')]), - ])), - ('Werkzeug', '1.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c')]), - ])), - ('hiredis', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '996021ef33e0f50b97ff2d6b5f422a0fe5577de21a8873b58a779a5ddd1c3132')]), - ])), - ('redis', '3.5.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2')]), - ])), - ('Flask-Caching', '1.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a0356ad868b1d8ec2d0e675a6fe891c41303128f8904d5d79e180d8b3f952aff')]), - ])), - ('dash', '1.16.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'fdfe2a16c4041c7769e06e4e0eaaeb65cfd8351797ed4b37efe001384637b6c7')]), - ])), - ('dash_renderer', '1.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '736f8e0dcce244699457b54a2ee262a04baf389db1a8f54e98c949bb3e7e487f')]), - ])), - ('dash_core_components', '1.12.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '803b54d01ca48b90f3fc1652f7be9f818ed2882da8975a51b99fc2d77dd2727e')]), - ])), - ('dash_html_components', '1.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2c662e640528c890aaa0fa23d48e51c4d13ce69a97841d856ddcaaf2c6a47be3')]), - ])), - ('dash_table', '4.10.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1aa02180faef13434364286b60404d26164d1ce2779c765c9c52e6935991a4e9')]), - ])), - ('dash-bootstrap-components', '0.10.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '40500e692f59648dbc024b9e921a1e1410ee0bc97cfa963990e54e42523679b4')]), - ])), - ('dash_daq', '0.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a1d85b6799f7b885652fbc44aebdb58c41254616a8d350b943beeb42ade4256a')]), - ])), - ('dash_player', '0.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '46114910b497f35f1aa496ed8b9ff1457d07c96171227961b671ba4164c537a0')]), - ])), - ('dash_canvas', '0.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '72fcfb37e1c0f68c08f6fa6cf0b5be67ecc66fcfb5253231ffc450957b640b31')]), - ])), - ('dash_bio', '0.4.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a6f480b060488ab9be26129562a7bcb491ef40ae0ffae46f499fb7d5997de06c')]), - ])), - ('dash_cytoscape', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0669c79c197e4b150a5db7a278d1c7acebc947f3f5cbad5274835ebb44f712cd')]), - ])), - ('ansi2html', '1.5.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '96ae85ae7b26b7da674d87de2870ba4d1964bca733ae4614587080b6358c3ba9')]), - ])), - ('jupyter-dash', '0.3.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e41d68aa85a7dd53480cc33feae04f4cfd7ac2ace8089c1e1630a2342e8bd8aa')]), - ])), - # more - ('fastcore', '1.3.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '550c201f6a0f7001994898260588e3df310968165c43ecc4fa3062dd6eee5956')]), - ])), - ('fastscript', '1.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '67d2315a508ffd0499af590fffaa63d276ce6eaff73ffbd60eb3315ba38d08fa')]), - ])), - # ('nbdev', '1.1.5', dict(list(local_common_opts.items()) + [ - # # nbdev < 2 needs nbconvert < 6, will change with nbdev >= 2 - # ('checksums', 
[('sha256', '0ce349625514e2865bbc023db597d791c45b572a7bbc8a49b320a327b9e7b721')]), - # ])), - ('PyJWT', '1.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96')]), - ('modulename', 'jwt'), - ])), - ('pyunicore', '0.9.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '09cd91562feb7677c8bd9cbbe02e1b0d27d1b5817e12ef5bf4701619cac77d9b')]), - ])), - - # misc - # ('rpy2', '3.2.4', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '3daf1a4b28c4e354ef989093f03b066908bf6e5082a6f4af72cc3fd928a28dc6')]), - # ])), -] - -local_jupyter_config_path = 'etc/jupyter' -local_jupyter_path = 'share/jupyter' -local_jupyterlab_dir = 'share/jupyter/lab' - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - # search path to find installable data files, such as kernelspecs and notebook extensions - 'JUPYTER_PATH': [local_jupyter_path], - 'JUPYTERLAB_DIR': [local_jupyterlab_dir], - # do NOT set JUPYTER_CONFIG_DIR: if not set, if will be ${HOME}/.jupyter, which is just right - 'JUPYTER_CONFIG_PATH': [local_jupyter_config_path] # config dir at install time. - # ATTENTION: not config dir at runtime, because this is fixed to {sys.prefix}/etc/jupyter/ -} - -modextravars = { - 'JUPYTER': '%(installdir)s/bin/jupyter', - 'MKL_THREADING_LAYER': 'GNU', # https://github.com/Theano/Theano/issues/6568 -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is fist entry in JUPYTHER_PATH and JUPYTER_DATA_DIR -# https://jupyter.readthedocs.io/en/latest/projects/jupyter-directories.html#envvar-JUPYTER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -prepend_path("JUPYTER_DATA_DIR", pathJoin(os.getenv("HOME"),".local/share/jupyter")) -""" - -postinstallcmds = [ - # ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - - 'echo "#!/bin/bash" > %(builddir)s/env.sh', - 'echo "source %(installdir)s/bin/activate" >> %(builddir)s/env.sh', - ( - 'echo "export PYTHONPATH=' - '%(installdir)s/lib/python3.8/site-packages:${EBROOTPYTHON}/lib/python3.8/site-packages:${PYTHONPATH}"' - ' >> %(builddir)s/env.sh' - ), - # Jupyter Paths - http://jupyter.readthedocs.io/en/latest/projects/jupyter-directories.html - 'echo "export JUPYTER=%(installdir)s/bin/jupyter" >> %(builddir)s/env.sh', - 'echo "export JUPYTER_PATH=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_path, - 'echo "export JUPYTERLAB_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyterlab_dir, - # Config dir at install time. ATTENTION: not config dir at runtime. This is picked up by JUPYTER_CONFIG_PATH - 'echo "export JUPYTER_CONFIG_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_config_path, - # jupyter will use $JUPYTER_CONFIG_DIR with "--user" - 'echo "export JUPYTER_DATA_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_path, - 'echo "export PATH=%(installdir)s/bin:${PATH}" >> %(builddir)s/env.sh', - - # NodeJS packages - 'source %(builddir)s/env.sh && npm install -g phantomjs-prebuilt@2.1.16', # req. 
export_png/export_svg of bokeh - - # Jupyter Notebook Extensions - 'source %(builddir)s/env.sh && jupyter contrib nbextension install --user', - 'source %(builddir)s/env.sh && jupyter nbextensions_configurator enable --user', - 'source %(builddir)s/env.sh && jupyter nbextension enable codefolding/main --user', - - ('source %(builddir)s/env.sh && ' - ' jupyter nbextension install widgetsnbextension --py --nbextensions=${JUPYTER_PATH}/nbextensions'), - 'source %(builddir)s/env.sh && jupyter nbextension enable widgetsnbextension --py --user', - - 'source %(builddir)s/env.sh && jupyter nbextension enable ipyvuetify --py --user', - - 'source %(builddir)s/env.sh && jupyter nbextension install rise --py --nbextensions=${JUPYTER_PATH}/nbextensions', - 'source %(builddir)s/env.sh && jupyter nbextension enable rise --py --user', - - 'source %(builddir)s/env.sh && jupyter nbextension enable appmode --py --user', - 'source %(builddir)s/env.sh && jupyter serverextension enable appmode --py', - - # Jupyter Notebook Extensions - 'source %(builddir)s/env.sh && jupyter labextension install @jupyter-widgets/jupyterlab-manager@2.0.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyter-widgets/jupyterlab-sidecar@0.5.0 --no-build', - - # 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/server-proxy@2.1.1 --no-build', - ('source %(builddir)s/env.sh && ' - ' cd %(builddir)s/jupyterserverproxy/jupyter-server-proxy-1.5.2/jupyterlab-server-proxy/ && ' - ' npm install && ' # install npm package dependencies incurrent directory - ' npm run build && ' # optional build step if using TypeScript, babel, etc. - ' jupyter labextension install --no-build'), # install the current directory as an extension' - # 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/github@2.0.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/latex@2.0.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/git@0.23.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/toc@4.0.0 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-datawidgets@6.3.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-plotly@4.8.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-gitlab@2.0.0 --no-build', - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab_iframe@0.2.2 --no-build', - - # this might interfer with Xpra-icon: https://github.com/cmd-ntrf/jupyter-lmod/issues/30 - ('source %(builddir)s/env.sh && ' - ' cd %(builddir)s/jupyterlmod/jupyter-lmod-2.0.2/jupyterlab/ && ' - ' npm install && ' # install npm package dependencies incurrent directory - ' npm run build && ' # optional build step if using TypeScript, babel, etc. 
- ' jupyter labextension install --no-build'), # install the current directory as an extension - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-nvdashboard@0.3.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-dash@0.3.0 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-topbar-extension@0.5.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-system-monitor@0.6.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-theme-toggle@0.5.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-control@1.1.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-favorites@2.0.0 --no-build', - # store default in ../share/jupyter/lab/schemas/jupyterlab-favorites/favorites.json - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-tour@2.1.1 --no-build', - # tour-state on/off is saved in workspace-file in ~/.jupyter/lab/workspaces/ - - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-matplotlib@0.7.4 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-leaflet@0.13.2 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-threejs@2.2.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-vuetify@1.5.1 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install nbdime-jupyterlab@2.0.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install dask-labextension@3.0.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install ipyvolume@0.6.0-alpha.5 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install plotlywidget@4.12.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install bqplot@0.5.17 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install dicom-upload@0.2.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jsfileupload@0.2.0 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install @pyviz/jupyterlab_pyviz@1.0.4 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @bokeh/jupyter_bokeh@2.0.2 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyter-voila/jupyterlab-preview@1.1.0 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install @parente/jupyterlab-quickopen@0.5.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @ryantam626/jupyterlab_code_formatter@1.3.8 --no-build', - ('source %(builddir)s/env.sh && ' - ' jupyter labextension install @krassowski/jupyterlab_go_to_definition@1.0.0 --no-build'), - 'source %(builddir)s/env.sh && jupyter labextension install @krassowski/jupyterlab-lsp@2.1.2 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install pvlink@0.3.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install itkwidgets@0.32.0 --no-build', - - # build JupyterLab app directory for all previous installed extensions in one go - 'source %(builddir)s/env.sh && jupyter lab build --dev-build=False', # --minimize=False - - # jupyterlab server extensions - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyter_server_proxy --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyterlab_code_formatter --py', - # 'source 
%(builddir)s/env.sh && jupyter serverextension enable jupyterlab_sql --py', - # 'source %(builddir)s/env.sh && jupyter serverextension enable jupyterlab_iframe --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyterlab_git --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyter_lsp --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable dask_labextension --py', - - # configure jupyterlab extensions - - # Send2Trash - # disable - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - 'c.FileContentsManager.delete_to_trash = False\n' - 'EOF' - ), - - # GitLab-extension - # for security reasons access-token must be set in the server extension: - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - '# no username+password needed, if repo is public or we have the token for a specific URL\n' - '# c.GitLabConfig.access_token = "<API-TOKEN>" # give api-access of user "jupyter.jsc@fz-juelich.de"\n' - 'c.GitLabConfig.allow_client_side_access_token = False\n' - 'c.GitLabConfig.url = "https://gitlab.version.fz-juelich.de"\n' - 'c.GitLabConfig.validate_cert = False\n' - 'EOF' - ), - - # GitHub-extension - # for security reasons access-token must be set in the server extension: - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - '# no username+password needed, if repo is public or we have the token for a specific URL\n' - '# c.GitHubConfig.access_token = "<API-TOKEN>"\n' - 'c.GitHubConfig.allow_client_side_access_token = False\n' - 'c.GitHubConfig.url = "https://github.com"\n' - 'c.GitHubConfig.validate_cert = False\n' - 'EOF' - ), - - # iframe-extension - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - '# c.JupyterLabIFrame.iframes = [\'list\', \'of\', \'sites\']\n' - 'c.JupyterLabIFrame.welcome = "http://www.fz-juelich.de/jsc"\n' - 'EOF' - ), - - # define .ipynb_checkpoints permissions - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - 'c.FileCheckpoints.checkpoint_permissions = 0o664\n' - 'c.FileCheckpoints.restore_permissions = 0o644\n' - 'c.FileCheckpoints.checkpoint_dir_umask = 0o002\n' - 'EOF' - ), - - # modify the installation files, which would be overwritten if done before (must be last commands to run) - ( - 'cp %(builddir)s/jupyterlabgitlab/jupyterlab-gitlab-2.0.0/schema/drive.json' - ' %(installdir)s/share/jupyter/lab/schemas/jupyterlab-gitlab/drive.json' - ), - - # Add the overrides file - ( - '{ cat > %(builddir)s/file_jupyter-overrides_jsc.patch; } << \'EOF\'\n' - 'diff -Naur share.orig/jupyter/lab/settings/overrides.json share/jupyter/lab/settings/overrides.json\n' - '--- share.orig/jupyter/lab/settings/overrides.json 1970-01-01 01:00:00.000000000 +0100\n' - '+++ share/jupyter/lab/settings/overrides.json 2019-11-26 13:40:46.560731000 +0100\n' - '@@ -0,0 +1,8 @@\n' - '+{\n' - '+ "jupyterlab-gitlab:drive": {\n' - '+ "baseUrl": "https://gitlab.version.fz-juelich.de"\n' - '+ },\n' - '+ "@parente/jupyterlab-quickopen:plugin": {\n' - '+ "relativeSearch": true\n' - '+ }\n' - '+}\n' - 'EOF' - ), - 'patch -p0 -d %(installdir)s < %(builddir)s/file_jupyter-overrides_jsc.patch', - - # add webpage, which leads back to https://jupyter-jsc.fz-juelich.de - 'cp %%(builddir)s/jupyterlab/jupyterlab-%s/401.html %%(installdir)s/share/jupyter/lab/static/' % local_jlab_version, - - # ################################################### - # IMPORTANT: - # start JupyterLab 
once (for 60 seconds) to allow some cleanup at first start - # ################################################### - ( - 'source %(builddir)s/env.sh && ' - '{(jupyter lab --no-browser) & } && JLAB_PID=$! && ' - 'sleep 60 && ' - 'jupyter notebook list --json | grep $JLAB_PID | ' - 'awk \'{for(i=1;i<=NF;i++)if($i=="\\"port\\":")print $(i+1)}\' | sed \'s/,*$//g\' | ' - 'xargs -i jupyter notebook stop {}' - ), - - # Ensure Jupyter does not want to build anything on startup - # The build_config.json file is used to track the local directories that have been installed - # using jupyter labextension install <directory>, as well as core extensions that have been explicitly uninstalled. - # 'if [ -e %(installdir)s/share/jupyter/lab/settings/build_config.json ]; then exit 1; fi ', - ( - '{ cat > %(installdir)s/share/jupyter/lab/settings/build_config.json; } << \'EOF\'\n' - '{\n' - ' "local_extensions": {}\n' - '}\n' - 'EOF' - ), - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm %(installdir)s/bin/python', - 'rm %(installdir)s/bin/python3', - 'rm %(installdir)s/bin/activate', - 'rm %(installdir)s/bin/activate*', - 'rm %(installdir)s/bin/easy_install*', - 'rm %(installdir)s/bin/pip*', - - # Compile Python files to byte-code to speedup execution - # ERROR: returns with exit code 1, because some files cannot be compiled for different reasons - # ################################################### - # Disable possible, because sanity check will # - # force the compile of all python packages anyway# - # ################################################### - # 'source %(builddir)s/env.sh && python -m compileall %(installdir)s', - - # ################################################### - # IMPORTANT: must be done manual after eb-install: # - # ################################################### - # 'chmod -R g-w %(installdir)s ', # software-group must not modify the installation on accident - # 'chmod -R ugo-w %(installdir)s/share ', # Noone should add files/configs to the global share after install - # 'chmod -R ug-w ...../2020/software/Python/3.8.5-GCCcore-9.3.0/share ', # Python module, too -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True # would result in sanity-errors about yaml,ipython_genutils,IPython,traitlets -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/Jupyter/Jupyter-2020.2.6-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/j/Jupyter/Jupyter-2020.2.6-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index a9b2b3497eb671688af31e11fc1a159b91cf6a99..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/Jupyter-2020.2.6-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,1214 +0,0 @@ -easyblock = 'Bundle' - -name = 'Jupyter' -version = '2020.2.6' -versionsuffix = '-Python-%(pyver)s' - -local_jlab_version = '2.2.9' - -homepage = 'http://www.jupyter.org' -description = """ -Project Jupyter exists to develop open-source software, open-standards, and services for interactive computing across -dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('unzip', '6.0'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('libyaml', '0.2.5'), - ('Pandoc', '2.11.0.4', '', SYSTEM), # For doc-generation - ('texlive', '20200406'), - ('ITK', '5.1.2', '-nompi' + versionsuffix), - ('HDF5', '1.10.6', '-serial'), - ('h5py', '2.10.0', '-serial' + versionsuffix), - ('netcdf4-python', '1.5.4', '-serial' + versionsuffix), - ('FFmpeg', '4.3.1'), # for pydub - ('LLVM', '10.0.1'), # llvmlite - ('git', '2.28.0'), # for jupyterlab-git (req. >=2.0) - ('SciPy-Stack', '2020', versionsuffix), - ('scikit', '2020', versionsuffix), - ('numba', '0.51.1', versionsuffix), - ('Shapely', '1.7.1', versionsuffix), -] - -osdependencies = [('openssl'), ('git')] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'filter': ('python -c "import %(ext_name)s"', ''), - 'download_dep_fail': True, # True - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': True, - 'use_pip_for_deps': False, # False -} - -components = [ - ('nodejs', '14.15.3', { - 'easyblock': 'ConfigureMake', - 'source_urls': ['http://nodejs.org/dist/v%(version)s/'], - 'sources': ['node-v%(version)s.tar.gz'], - 'start_dir': 'node-v%(version)s', - }), -] - -exts_list = [ - ('ptvsd', '4.3.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'ptvsd-4.3.2.zip'), - ('checksums', [('sha256', '3b05c06018fdbce5943c50fb0baac695b5c11326f9e21a5266c854306bda28ab')]), - ])), - ('pyOpenSSL', '19.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507')]), - ('modulename', 'OpenSSL'), - ])), - ('entrypoints', '0.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451')]), - ('use_pip', False), - ])), - ('async_generator', '1.10', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144')]), - ])), - ('nest_asyncio', '1.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa')]), - ])), - ('absl-py', '0.10.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b20f504a7871a580be5268a18fbad48af4203df5d33dbc9272426cb806245a45')]), - ('modulename', 'absl'), - ])), - ('websockify', '0.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c35b5b79ebc517d3b784dacfb993be413a93cda5222c6f382443ce29c1a6cada')]), - ])), - ('typing_extensions', '3.7.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c')]), - ])), - # General Python packages - ('tornado', '6.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791')]), - ])), - ('bokeh', '2.2.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c4a3f97afe5f525019dd58ee8c4e3d43f53fe1b1ac264ccaae9b02c07b2abc17')]), - ])), - ('nbformat', '5.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'1d223e64a18bfa7cdf2db2e9ba8a818312fc2a0701d2e910b58df66809385a56')]), - ])), - ('param', '1.9.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8370d41616e257b8ed2e242ec531e0340b8c954bea414b791fa0ef6235959981')]), - ])), - # Jupyter-core and dependencies - ('alabaster', '0.7.12', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02')]), - ])), - ('Babel', '2.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28')]), - ('modulename', 'babel'), - ])), - ('snowballstemmer', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52')]), - ])), - ('docutils', '0.15.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99')]), - ])), - ('imagesize', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5')]), - ])), - ('sphinxcontrib-websupport', '1.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1501befb0fdf1d1c29a800fdbf4ef5dc5369377300ddbdd16d2cd40e54c6eefc')]), - ('modulename', 'sphinxcontrib'), - ])), - ('Sphinx', '1.8.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c7658aab75c920288a8cf6f09f244c6cfdae30d82d803ac1634d9f223a80ca08')]), - ('modulename', 'sphinx'), - ])), - ('ipynb', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8d834c777ca3885289938728cc382f081c86a58e92961e86f0aba60c96938ce5')]), - ])), - ('jupyter_core', '4.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4')]), - ])), - ('retrying', '1.3.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '08c039560a6da2fe4f2c426d0766e284d3b736e355f8dd24b37367b0bb41973b')]), - ])), - ('plotly', '4.14.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7d8aaeed392e82fb8e0e48899f2d3d957b12327f9d38cdd5802bc574a8a39d91')]), - ])), - ('tikzplotlib', '0.9.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bf205e74e27e9efefde70d7773675a8432dab600741ac8c0db93daaeb7fc957c')]), - ])), - # Jupyter client - ('jupyter_client', '6.1.11', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '649ca3aca1e28f27d73ef15868a7c7f10d6e70f761514582accec3ca6bb13085')]), - ])), - ('pynvml', '8.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c8d4eadc648c7e12a3c9182a9750afd8481b76412f83747bcc01e2aa829cde5d')]), - ])), - # Jupyter notebook and dependencies - ('singledispatch', '3.4.0.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c')]), - ])), - ('ipyparallel', '6.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0a97b276c62db633e9e97a816282bdd166f9df74e28204f0c8fa54b71944cfdc')]), - ])), - ('ipykernel', '5.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '697103d218e9a8828025af7986e033c89e0b36e2b6eb84a5bda4739b9a27f3cb')]), - ])), - ('terminado', '0.9.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '89e6d94b19e4bc9dce0ffd908dfaf55cc78a9bf735934e915a4a96f65ac9704c')]), - ('use_pip', 
False), - ])), - ('bleach', '3.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433')]), - ])), - ('mistune', '0.8.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e')]), - ])), - ('pandocfilters', '1.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb')]), - ])), - ('testpath', '0.4.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e')]), - ('use_pip', False), - ])), - ('nbconvert', '6.0.7', dict(list(local_common_opts.items()) + [ - # !!! nbconvert will try to read from all paths in <jupyter-config-path> the file nbconvert/templates/conf.json - # ensure it has permissions (https://github.com/jupyter/nbconvert/issues/1430) - ('checksums', [('sha256', 'cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002')]), - # 6.0.7 - patch for jupyter_contrib_nbextensions needed: - # https://github.com/ipython-contrib/jupyter_contrib_nbextensions/pull/1532 - ])), - ('Send2Trash', '1.5.0', dict(list(local_common_opts.items()) + [ # req. by widgetsnbextension - ('checksums', [('sha256', '60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2')]), - ('modulename', 'send2trash'), - ])), - ('argon2-cffi', '20.1.0', dict(list(local_common_opts.items()) + [ # req. for notebook >= 6.1 - ('checksums', [('sha256', 'd8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d')]), - ('modulename', 'argon2'), - ])), - ('notebook', '6.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0464b28e18e7a06cec37e6177546c2322739be07962dd13bf712bcb88361f013')]), - ('patches', ['notebook-6.0.3_jsc.patch']), # allow others to read/write in .ipynb_checkpoints - ])), - ('version_information', '1.0.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '811b9cea4b376aa62a77dc729a937ce8e2844573b8686b5c1840147054fb938d')]), - ])), - ('lesscpy', '0.14.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7b664f60818a16afa8cc9f1dd6d9b17f944e0ce94e50787d76f81bc7a8648cce')]), - ])), - ('prometheus_client', '0.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03')]), - ])), - ('jupyterthemes', '0.20.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2a8ebc0c84b212ab99b9f1757fc0582a3f53930d3a75b2492d91a7c8b36ab41e')]), - ])), - # Jupyter Lab and dependencies - ('zipp', '3.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb')]), - ])), - ('jupyter-packaging', '0.7.12', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b140325771881a7df7b7f2d14997b619063fe75ae756b9025852e4346000bbb8')]), - ])), - ('importlib_metadata', '3.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d')]), - ])), - ('jupyterlab_launcher', '0.13.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f880eada0b8b1f524d5951dc6fcae0d13b169897fc8a247d75fb5beadd69c5f0')]), - ])), - ('sphinx_rtd_theme', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a')]), - ])), - ('commonmark', '0.9.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60')]), - ])), - ('recommonmark', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '29cd4faeb6c5268c633634f2d69aef9431e0f4d347f90659fd0aab20e541efeb')]), - ])), - ('jupyterlab', local_jlab_version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3be8f8edea173753dd838c1b6d3bbcb6f5c801121f824a477025c1b6a1d33dc6')]), - ('patches', [('401html.patch', 1)]) - ])), - ('json5', '0.8.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '124b0f0da1ed2ff3bfe3a3e9b8630abd3c650852465cb52c15ef60b8e82a73b0')]), - ])), - ('jupyterlab_server', '1.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5431d9dde96659364b7cc877693d5d21e7b80cea7ae3959ecc2b87518e5f5d8c')]), - ])), - ('jupyter_kernel_gateway', '2.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6424a9f118e757ef18e9bed7784ca05ad9e633945df328ac4d8810eadc6f6ccd')]), - ('modulename', 'kernel_gateway'), - ])), - ('nbclient', '0.5.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0ed6e5700ad18818030a3a5f0f201408c5972d8e38793840cd9339488fd9f7c4')]), - ])), - # Jupyter Widgets and dependencies - ('defusedxml', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5')]), - ])), - ('widgetsnbextension', '3.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '079f87d87270bce047512400efd70238820751a11d2d8cb137a5a5bdbaf255c7')]), - ])), - ('ipywidgets', '7.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e945f6e02854a74994c596d9db83444a1850c01648f1574adf144fbbabe05c97')]), - ])), - # ('ipyscales', '0.6.0', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', 'f2c42776482081b2771a2789623e1744a4e578f5d51644c1450b53738292b069')]), - # ])), # fails because of PEP 517 - ('ipydatawidgets', '4.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd9f94828c11e3b40350fb14a02e027f42670a7c372bcb30db18d552dcfab7c01')]), - ])), - ('traittypes', '0.2.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'be6fa26294733e7489822ded4ae25da5b4824a8a7a0e0c2dccfde596e3489bd6')]), - ])), - ('bqplot', '0.12.22', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3f0eee5e2b03866f729ef295f9f46310dcafa832040064f40995a3e876986855')]), - ])), - ('jupyter_bokeh', '2.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd337306021fb6a8e63556dbe2eaf539a8025aa76d9ba512b18189bfb1f708177')]), - ])), - ('pythreejs', '2.2.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'acceeac346054bcc280bf93f494557e6f5ab404860cfee584d72a6c3dde16463')]), - ])), - ('ipywebrtc', '0.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4db2be7d0bfcbd142b2f9f9e8303b926832a632ed4a3bc5681b319a5f226285a')]), - ])), - ('ipyvolume', '0.6.0a6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1a71c681dd39b514db966c4812bbbd1347ce082ee7a7bcc53f494e0546bf37ff')]), - ])), - ('branca', '0.3.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3e762c9bdf40725f3d05ea1fda8fae9b470bfada6474e43a1242c8204a7bb15e')]), - ])), - 
('ipyleaflet', '0.13.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c05f138327926bc81f29a629588bd656be5ff76dd8785c1e7eac5445b1d5a432')]), - ])), - ('ipympl', '0.5.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0e2f2e540a2dfea61524b7993fc8552c9236b1aaa3826e1f382c75cb2fa5c382')]), - ])), # respect version lookup table: https://github.com/matplotlib/ipympl - # Jupyter Notebook Extensions - ('jupyter_nbextensions_configurator', '0.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e5e86b5d9d898e1ffb30ebb08e4ad8696999f798fef3ff3262d7b999076e4e83')]), - ])), - ('jupyter_latex_envs', '1.4.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '070a31eb2dc488bba983915879a7c2939247bf5c3b669b398bdb36a9b5343872')]), - ('patches', ['jupyter_latex_envs-template_paths.patch']), - # support for nbconvert>=6.x -> https://github.com/jfbercher/jupyter_latex_envs/pull/58 - ('modulename', 'latex_envs'), - ])), - ('jupyter_highlight_selected_word', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9fa740424859a807950ca08d2bfd28a35154cd32dd6d50ac4e0950022adc0e7b')]), - ])), - ('jupyter_contrib_core', '0.3.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e65bc0e932ff31801003cef160a4665f2812efe26a53801925a634735e9a5794')]), - ])), - ('jupyter_contrib_nbextensions', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'eecd28ecc2fc410226c0a3d4932ed2fac4860ccf8d9e9b1b29548835a35b22ab')]), - ('patches', ['jupyter_contrib_nbextensions-template_paths.patch']), - # support for nbconvert>=6.x -> https://github.com/ipython-contrib/jupyter_contrib_nbextensions/pull/1532 - ])), - ('rise', '5.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '641db777cb907bf5e6dc053098d7fd213813fa9a946542e52b900eb7095289a6')]), - ])), - ('idna-ssl', '1.1.0', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', 'a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c')]), - ])), - ('multidict', '4.7.5', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', 'aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e')]), - ])), - ('yarl', '1.6.3', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', '8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10')]), - ])), - ('async-timeout', '3.0.1', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', '0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f')]), - ])), - ('aiohttp', '3.6.2', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', '259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326')]), - ])), - ('simpervisor', '0.3', dict(list(local_common_opts.items()) + [ # indirect dep of jupyter-server-proxy - ('checksums', [('sha256', 'd82e4527ae326747551e4bdfa632ff4ebef275ce721f80886c747adfdbf41c2e')]), - ])), - ('sniffio', '1.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de')]), - ])), - ('anyio', '2.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8a56e08623dc55955a06719d4ad62de6009bb3f1dd04936e60b2104dd58da484')]), - ])), - ('jupyter_server', '1.1.2', 
dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b4b1f8f0c38e1faf418f3ff536cdb6d84dfb477d150abee5f0e65c7fd95fa527')]), - ])), - ('jupyter-server-proxy', '1.5.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://github.com/jupyterhub/jupyter-server-proxy/archive/']), - ('source_tmpl', 'v%(version)s.tar.gz'), - ('checksums', [('sha256', '140bd642c511519ddd2acc5f70e0b46a40bbc673c888dcb3b19981005286853b')]), - ('patches', ['jupyterserverproxy-urlfile.patch']), - ])), - # Jupyter Lab Extensions - ('jupyterlab_github', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1f560a91711b779d08118161af044caff44159e315cb80ae830d3dfbded7bac9')]), - # do not use pypi for download -> we need to patch drive.json - ('source_urls', ['https://github.com/jupyterlab/jupyterlab-github/archive']), - ('source_tmpl', 'v%(version)s.tar.gz'), - ('patches', ['jupyterlab_github-%(version)s_jsc.patch']), - ])), - ('jupyterlab-gitlab', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '31d3fbd9b139aca7704dd13967ac22c81e372b50c4db730f342ab7800db7a5c6')]), - # do not use pypi for download -> we need to patch drive.json - ('source_urls', ['https://gitlab.com/beenje/jupyterlab-gitlab/-/archive/%(version)s']), - ('source_tmpl', 'jupyterlab-gitlab-%(version)s.tar.gz'), - ('patches', ['jupyterlab-gitlab-%(version)s_jsc.patch']), - ])), - ('jupyterlab-quickopen', '0.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('90cba3a7e6ff8d2b3eaf2594079c25f82e2c158d2ec63ebd951e4042a7445f8e')]), - ])), - ('zstandard', '0.12.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a110fb3ad1db344fbb563942d314ec5f0f3bdfd6753ec6331dded03ad6c2affb')]), - ])), - # ('jupyterlab_hdf', '0.5.1', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '9397fed0487e5811d1147851c3d7781fc9a53eb9c7b4471eeebf555f38ddc02e')]), - # ])), - # ('elyra-server', '1.5.3', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '36dd386862252636bd1ff9af657fbb2c51d59a84c99332be9d749c64eed3410f')]), - # ])), # req. 
kfp, kfp-notebook, minio, rfc3986-validator - # ('elyra-pipeline-editor-extension', '1.5.3', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', 'da134c224e968a1df643f74b99263a6ee21a557bda5615d779474e77a1f7e90c')]), - # ])), - # https://blog.kitware.com/itk-is-on-pypi-pip-install-itk-is-here/ - ('itk_core', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_core-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '1f0254f0f134b709e6df3601dc38ccc45c2d5d5576fc10e1a0313d1fe8aefa84')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_filtering', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_filtering-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'f4a1256c8c684329780b9f4326fb571023af1d96fbda7cb7b513b4a395a9cd42')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_segmentation', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_segmentation-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'f81ba4881f7802eb5b911c4e1eac4706220647196ebda68477318630542db226')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_numerics', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_numerics-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '6ac84a6386cd51692ed9605b6daefcc6230ec976f93161c9c5d89cfdfe042eba')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_registration', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_registration-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'd25b578ffb859f07cbad6987d1adb507e357d91f82863faeae337d8645b9d29d')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_io', '5.1.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_io-5.1.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '6ecc528ce4ae42b6d14c972d51055856f130a13474463ba4bf187ed2afdb04b1')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('itk_meshtopolydata', '0.6.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'itk_meshtopolydata-0.6.2-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', '486e418808c27ccc76619737219fa57fcd1de2da5fcd0325ee8c4395965deb39')]), - ('unpack_sources', False), - ('modulename', 'itk'), - ])), - ('pyct', '0.4.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'df7b2d29f874cabdbc22e4f8cba2ceb895c48aa33da4e0fe679e89873e0a4c6e')]), - ])), - ('colorcet', '2.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '514813790a74b578c3eaff76b2102274c2ba8b0239c9504586df685223007dee')]), - ])), - ('itkwidgets', '0.32.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '11ee93007cf354405e01bf9a28fb1eb2fb0a5d139e1cdded1a07b47f7ee76972')]), - ])), - ('ujson', '3.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'abb1996ba1c1d2faf5b1e38efa97da7f64e5373a31f705b96fe0587f5f778db4')]), - ])), - # ('jupyterlab_iframe', '0.2.2', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '7c26cddc5f29f0b5ac6ba4707ce811f9787282549d22098b691bcdcc6692238c')]), - # ('use_pip', False), - # ])), - ('python-dotenv', '0.10.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f157d71d5fec9d4bd5f51c82746b6344dffa680ee85217c123f4a0c8117c4544')]), - ('modulename', 'dotenv'), - ])), - # ('ipyauth', 
'0.2.5', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '189db0e9fd48cf52f333ef8ac1a71b9e3a9eec6561f5085e3d7239ca2588ba87')]), - # ])), - ('jupyterlab_latex', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '722558144e6ce71abe4fba0fb01c13c37271fa2497881bfebe9b6422d1fc795b')]), - ])), - # ('jupyterlab_slurm', '2.0.0', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '87ada82e7746745bcc47d19f3caf54cc18798b058aedcd5d1005e19b784c2d4f')]), - # ])), - ('jupyterlmod', '2.0.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://github.com/cmd-ntrf/jupyter-lmod/archive/']), - ('source_tmpl', '%(version)s.tar.gz'), - ('checksums', [('sha256', 'f547432afb30cb87c8dabf78fd4cc4c35ff4b6d2aafbae19c249b7d596015c6a')]), - ('patches', [ - 'jupyterlmod-urlfile.patch', - 'jupyterlmod-packagejson.patch', - ]), - ])), - ('nbresuse', '0.3.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5ba19dd0b08ff19470aeb34cda2f07276a3fb1cd750bc53f3a3e06322664e98e')]), - ])), - ('colorama', '0.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d')]), - ])), - ('nbdime', '2.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4e3efdcfda31c3074cb565cd8e76e2e5421b1c4560c3a00c56f8679dd15590e5')]), - ])), - ('smmap2', '2.0.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '29a9ffa0497e7f2be94ca0ed1ca1aa3cd4cf25a1f6b4f5f87f74b46ed91d609a')]), - ('modulename', 'smmap'), - ])), - ('gitdb2', '2.0.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1b6df1433567a51a4a9c1a5a0de977aa351a405cc56d7d35f3388bad1f630350')]), - ('modulename', 'gitdb'), - ])), - ('GitPython', '3.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3237caca1139d0a7aa072f6735f5fd2520de52195e0fa1d8b83a9b212a2498b2')]), - ('modulename', 'git'), - ])), - ('jupyterlab_git', '0.23.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '20a4954d8d1b1eb6f9111cd15d6a598bd7ff72b08797cf5e86c5a55827c85a1d')]), - ])), - ('sidecar', '0.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3e79de269ddd2c0eff1cb26025cb9f434cf9914b777cf03eeab8347f6de7160e')]), - ])), - ('pycodestyle', '2.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e')]), - ])), - ('autopep8', '1.4.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4d8eec30cc81bc5617dbf1218201d770dc35629363547f17577c61683ccfb3ee')]), - ])), - ('yapf', '0.28.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6f94b6a176a7c114cfa6bad86d40f259bbe0f10cf2fa7f2f4b3596fc5802a41b')]), - ])), - ('isort', '4.3.21', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1')]), - ])), - ('pathspec', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e285ccc8b0785beadd4c18e5708b12bb8fcf529a1e61215b3feff1d1e559ea5c')]), - ])), - ('typed_ast', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a')]), - ])), - ('regex', '2019.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '720e34a539a76a1fedcebe4397290604cc2bdf6f81eca44adb9fb2ea071c0c69')]), - ])), - 
('black', '19.3b0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c')]), - ])), - ('jupyterlab_code_formatter', '1.3.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7c415caff528e55027e82c96b60dcab0537aa5cda168997f03a4964c51e2b7b5')]), - ])), - ############### - # extras - ('mccabe', '0.6.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f')]), - ])), - ('pyflakes', '2.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8')]), - ])), - ('flake8', '3.8.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208')]), - ])), - ('pydocstyle', '5.0.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5')]), - ])), - ('rope', '0.16.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd2830142c2e046f5fc26a022fe680675b6f48f81c7fc1f03a950706e746e9dfe')]), - ])), - # base for python language server - ('python-jsonrpc-server', '0.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '62c543e541f101ec5b57dc654efc212d2c2e3ea47ff6f54b2e7dcb36ecf20595')]), - ('modulename', 'pyls_jsonrpc'), - ])), - # test - ('versioneer', '0.18', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ead1f78168150011189521b479d3a0dd2f55c94f5b07747b484fd693c3fbf335')]), - ])), - ('lazy-object-proxy', '1.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0')]), - ])), - ('wrapt', '1.12.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7')]), - ])), - ('astroid', '2.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703')]), - ])), - ('typed_ast', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a')]), - ])), - ('pylint', '2.5.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7dd78437f2d8d019717dbf287772d0b2dbdfd13fc016aa7faa08d67bccc46adc')]), - ])), - ('pytest-cov', '2.10.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e')]), - ])), - ('pytest-xprocess', '0.13.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '779aeca517cd9c996d1544bdc510cb3cff40c48136d94bbce6148e27f30a93ff')]), - ])), - # python language server - ('python-language-server', '0.36.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9984c84a67ee2c5102c8e703215f407fcfa5e62b0ae86c9572d0ada8c4b417b0')]), - ('modulename', 'pyls'), - ])), - ('jupyter-lsp', '0.9.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '73b5cb47fbe672bee6c02ddc4f2fbe728dc3bff589b5741835ebb80a862163af')]), - ])), - - #################### - # Jupyter Hub - # ('SQLAlchemy', '1.3.10', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', '0f0768b5db594517e1f5e1572c73d14cf295140756431270d89496dc13d5e46c')]), - # ])), # part of Python module in version 
1.3.1 - # ('python-oauth2', '1.1.1', dict(list(local_common_opts.items()) + [ - # ('checksums', [('sha256', 'd7a8544927ac18215ba5317edd8f640a5f1f0593921bcf3ce862178312c8c9a4')]), - # ('modulename', 'oauth2'), - # ])), # part of Python module - ('pamela', '1.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '65c9389bef7d1bb0b168813b6be21964df32016923aac7515bdf05366acbab6c')]), - ])), - ('certipy', '0.1.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '695704b7716b033375c9a1324d0d30f27110a28895c40151a90ec07ff1032859')]), - ])), - ('oauthlib', '3.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889')]), - ])), - ('ruamel.yaml', '0.16.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '412a6f5cfdc0525dee6a27c08f5415c7fd832a7afcb7a0ed7319628aed23d408')]), - ])), - ('ruamel.yaml.clib', '0.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cee86ecc893a6a8ecaa7c6a9c2d06f75f614176210d78a5f155f8e78d6989509')]), - ('modulename', 'ruamel.yaml'), - ])), - ('python-json-logger', '0.1.11', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b7a31162f2a01965a5efb94453ce69230ed208468b0bbc7fdfc56e6d8df2e281')]), - ('modulename', 'pythonjsonlogger'), - ])), - ('jupyter_telemetry', '0.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '445c613ae3df70d255fe3de202f936bba8b77b4055c43207edf22468ac875314')]), - ])), - ('jupyterhub', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '852a70225a03abd631b36a207f3ffdf69326a0db4cef539825fde39ec1b713d7')]), - ('patches', ['jupyterhub-1.1.0_logoutcookie-2.0.patch']), - # copy 401.html -> <jupyter-install-dir>/share/jupyter/lab/static/ - ])), - ('appmode', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'deac99adaa95e28fa8a56c072be653603cffa49dc06469701ac9c014b690e7c4')]), - ])), - ('HeapDict', '1.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8495f57b3e03d8e46d5f1b2cc62ca881aca392fd5cc048dc0aa2e1a6d23ecdb6')]), - ])), - ('zict', '2.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8e2969797627c8a663575c2fc6fcb53a05e37cdb83ee65f341fc6e0c3d0ced16')]), - ])), - ('tblib', '1.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '059bd77306ea7b419d4f76016aef6d7027cc8a0785579b5aad198803435f882c')]), - ])), - ('sortedcontainers', '2.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1')]), - ])), - ('msgpack', '1.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9534d5cc480d4aff720233411a1f765be90885750b07df772380b34c10ecb5c0')]), - ])), - ('dask', '2.30.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a1669022e25de99b227c3d83da4801f032415962dac431099bf0534648e41a54')]), - ])), - ('cloudpickle', '1.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9bc994f9e9447593bd0a45371f0e7ac7333710fcf64a4eb9834bf149f4ef2f32')]), - ])), - ('distributed', '2.30.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1421d3b84a0885aeb2c4bdc9e8896729c0f053a9375596c9de8864e055e2ac8e')]), - ])), - ('dask-jobqueue', '0.7.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1767f4146b2663d9d2eaef62b882a86e1df0bccdb8ae68ae3e5e546aa6796d35')]), - 
])), - ('dask_labextension', '3.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c613f5c76b8fce4fae167eeab3377e0706e5045a27da1200b3b173025a94d94b')]), - ])), - ('Automat', '0.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '269a09dfb063a3b078983f4976d83f0a0d3e6e7aaf8e27d8df1095e09dc4a484')]), - ])), - ('PyHamcrest', '1.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '8ffaa0a53da57e89de14ced7185ac746227a8894dbd5a3c718bf05ddbd1d56cd')]), - ('modulename', 'hamcrest'), - ])), - ('pyasn1-modules', '0.2.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e')]), - ])), - ('service_identity', '18.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0858a54aabc5b459d1aafa8a518ed2081a285087f349fe3e55197989232e2e2d')]), - ])), - ('Twisted', '19.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd5db93026568f60cacdc0615fcd21d46f694a6bfad0ef3ff53cde2b4bb85a39d')]), - ('source_tmpl', '%(name)s-%(version)s.tar.bz2'), - ])), - ('autobahn', '19.10.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '734385b00547448b3f30a752cbfd2900d15924d77dc4a1699b8bce1ea8899f39')]), - ])), - ('constantly', '15.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35')]), - ])), - ('hyperlink', '19.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654')]), - ])), - ('incremental', '17.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3')]), - ])), - ('txaio', '18.8.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '67e360ac73b12c52058219bb5f8b3ed4105d2636707a36a7cdafb56fe06db7fe')]), - ])), - ('zope.interface', '4.7.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'fd1101bd3fcb4f4cf3485bb20d6cb0b56909b94d3bd2a53a6cb9d381c3da3365')]), - ])), - ('wslink', '0.1.13', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6983114e978717a2681815b1ef4b8a0fa2c80c89c6ed09b9554a36c5869cf935')]), - ])), - ('jupyterlab_pygments', '0.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146')]), - ])), - # ('jupyterlab-nvdashboard', '0.4.0', dict(list(local_common_opts.items()) + [ - # # throughs too many errors if nvtools or driver are not installed - # ('checksums', [('sha256', 'ff12f20076e8cb52ea3319ccf148458ba6230611b52b835b0eeea10ba37f7fe1')]), - # ])), - ('ipyvue', '1.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e8549a7ac7dc45948a5f2735e17f97622313c7fea24ea3c1bd4a5ebf02bf5638')]), - ])), - ('ipyvuetify', '1.6.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd0cfa23ae5a012b0b4f1772a2b2c472f2cc705e9357e16fbe74bf247d2d979f1')]), - ])), - ('voila', '0.2.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e4b20c81e61f35b59958070e26d2843eb0aaaa25855b180e06e85b59f6dec02d')]), - ])), - ('voila-material', '0.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0827a27f0f23ca87bd8f565c4c227c754516d2a120ffce0f7ab1ee12fdec959f')]), - ('modulename', 'voila'), # fake module name to trick the sanity check - ])), - 
('voila-gridstack', '0.0.12', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7c73e4373abc953cb16f708eafe6b09b11ecae7f130c2db233e184907894fdae')]), - ('modulename', 'voila'), - ])), - ('voila-vuetify', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '522f4ec91eb3912a21215b6b04ad58070d1675809d7d4289ca8e42b577fde100')]), - ('modulename', 'voila'), - ])), - ('pydicom', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9488495a9f3e202d1219f7cb165aee0de31efd512d938c0226d34147ce860391')]), - ])), - ('dicom_upload', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd03f309bbae2094d3db75ffaa9753cca5982d2096ec55720a1f54343cc4a6877')]), - ])), - ('jsfileupload', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '245cd74a3c2ed4356df9a33d0072d8ab295b60b6fdfd69c6795396d455fc8a77')]), - ])), - ('pvlink', '0.3.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a2d5f2c204e0e779a5b865742019b4646b8592d76de87cac724dc84f64eaf80f')]), - ])), - ('textwrap3', '0.9.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'textwrap3-0.9.2.zip'), - ('checksums', [('sha256', '5008eeebdb236f6303dcd68f18b856d355f6197511d952ba74bc75e40e0c3414')]), - ('use_pip', True), - ])), - ('ansiwrap', '0.8.4', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'ansiwrap-0.8.4.zip'), - ('checksums', [('sha256', 'ca0c740734cde59bf919f8ff2c386f74f9a369818cdc60efe94893d01ea8d9b7')]), - ])), - ('backports.weakref', '1.0.post1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2')]), - ])), - ('backports.tempfile', '1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1c648c452e8770d759bdc5a5e2431209be70d25484e1be24876cf2168722c762')]), - ])), - ('tqdm', '4.56.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'fe3d08dd00a526850568d542ff9de9bbc2a09a791da3c334f3213d8d0bbbca65')]), - ])), - ('tenacity', '6.0.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '72f397c2bb1887e048726603f3f629ea16f88cb3e61e4ed3c57e98582b8e3571')]), - ])), - ('papermill', '2.2.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1c452b1c5a9ab52b94c99d8b7705ae7173f6aa88a3d28a5d30cffba48a46f5b6')]), - ])), - ('pyviz_comms', '0.7.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '712df4cca33dda351de754742b24361eee8e4b7c1cfb0e24f50dcb802fa25624')]), - ])), - ('Markdown', '3.2.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1fafe3f1ecabfb514a5285fca634a53c1b32a81cb0feb154264d55bf2ff22c17')]), - ('modulename', 'markdown'), - ])), - ('panel', '0.9.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '53340615f30f67f3182793695ebe52bf25e7bbb0751aba6f29763244350d0f42')]), - ])), - ('holoviews', '1.13.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cf2dd426cdc08bf2ca669268ca0432634574429e5b143314ee06ad88c5382802')]), - ])), - # PythonPackages for Tutorials - ('patsy', '0.5.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f115cec4201e1465cd58b9866b0b0e7b941caafec129869057405bfe5b5e3991')]), - ])), - ('statsmodels', '0.10.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9cd2194c6642a8754e85f9a6e6912cdf996bebf6ff715d3cc67f65dadfd37cc9')]), - ])), - ('cftime', '1.2.1', 
dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f')]), - ])), - ('vega_datasets', '0.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'db8883dab72b6f414e1fafdbf1e8db7543bba6ed77912a4e0c197d74fcfa1c20')]), - ])), - ('Theano', '1.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '35c9bbef56b61ffa299265a42a4e8f8cb5a07b2997dabaef0f8830b397086913')]), - ('modulename', 'theano'), - ])), - ('altair', '3.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9f7c521239ac5a207c3cffc29c5bdde0854fff0dec0b5f91f086ba8e5f1de8a9')]), - ])), - ('cssselect', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f95f8dedd925fd8f54edb3d2dfb44c190d9d18512377d3c1e2388d16126879bc')]), - ])), - ('smopy', '0.0.7', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '578b5bc2502176d210f176ab94e77974f43b32c95cd0768fb817ea2499199592')]), - ])), - ('memory_profiler', '0.55.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5fa47b274c929dd2cbcd9190afb62fec110701251d2ac2d301caaf545c81afc1')]), - ])), - ('line_profiler', '3.1.0', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'line_profiler-3.1.0-cp38-cp38-manylinux1_x86_64.whl'), - ('checksums', [('sha256', 'a66e089e6d98ab8a70b5f89c0367c6780abad0f0b1d624dbe5edd8f0083986c7')]), - ('unpack_sources', False), # whl package instead of tar.gz because building with skbuild->cmake fails - ])), - ('xarray', '0.16.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '38e8439d6c91bcd5b7c0fca349daf8e0643ac68850c987262d53526e9d7d01e4')]), - ])), - ('arviz', '0.10.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dc1f08ce02459989dd4f41fa6012e736cf2ba00ee44b29bebe0a451c58a68e42')]), - ])), - ('pymc3', '3.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1bb2915e4a29877c681ead13932b0b7d276f7f496e9c3f09ba96b977c99caf00')]), - ])), - ('numexpr', '2.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b0d239d9827e1bcee08344fd05835823bc60aff97232e35a928214d03ff802b1')]), - ])), - ('ipythonblocks', '1.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ba923cb7a003bddee755b5a7ac9e046ffc093a04b0bdede8a0a51ef900aed0ba')]), - ])), - ('pydub', '0.23.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c362fa02da1eebd1d08bd47aa9b0102582dff7ca2269dbe9e043d228a0c1ea93')]), - ])), - ('multipledispatch', '0.6.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a7ab1451fd0bf9b92cab3edbd7b205622fb767aeefb4fb536c2e3de9e0a38bea')]), - ])), - ('partd', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6e258bf0810701407ad1410d63d1a15cfd7b773fd9efe555dac6bb82cc8832b0')]), - ])), - ('locket', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1fee63c1153db602b50154684f5725564e63a0f6d09366a1cb13dffcec179fb4')]), - ])), - ('fsspec', '0.6.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ffd7cd5ac32f36698097c3d78c2c433d4c12f7e4bce3a3a4036fd3491188046d')]), - ])), - ('datashape', '0.5.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2356ea690c3cf003c1468a243a9063144235de45b080b3652de4f3d44e57d783')]), - ])), - ('datashader', '0.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'b1f80415f72f92ccb660aaea7b2881ddd35d07254f7c44101709d42e819d6be6')]), - ])), - ('selenium', '3.141.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'deaf32b60ad91a4611b98d8002757f29e6f2c2d5fcaf202e1c9ad06d6772300d')]), - ])), - ('graphviz', '0.13.2', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'graphviz-0.13.2.zip'), - ('checksums', [('sha256', '60acbeee346e8c14555821eab57dbf68a169e6c10bce40e83c1bf44f63a62a01')]), - ])), - ('vincent', '0.4.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5765bcd360140d2304e52728ad1d4382f3f919ea259a13932828680f2d84fcd3')]), - ])), - ('tailer', '0.4.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '78d60f23a1b8a2d32f400b3c8c06b01142ac7841b75d8a1efcb33515877ba531')]), - ])), - # Dash - ('Flask-Compress', '1.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '468693f4ddd11ac6a41bca4eb5f94b071b763256d54136f77957cfee635badb3')]), - ])), - ('hiredis', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '996021ef33e0f50b97ff2d6b5f422a0fe5577de21a8873b58a779a5ddd1c3132')]), - ])), - ('redis', '3.5.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2')]), - ])), - ('Flask-Caching', '1.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a0356ad868b1d8ec2d0e675a6fe891c41303128f8904d5d79e180d8b3f952aff')]), - ])), - ('dash', '1.16.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'fdfe2a16c4041c7769e06e4e0eaaeb65cfd8351797ed4b37efe001384637b6c7')]), - ])), - ('dash_renderer', '1.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '736f8e0dcce244699457b54a2ee262a04baf389db1a8f54e98c949bb3e7e487f')]), - ])), - ('dash_core_components', '1.12.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '803b54d01ca48b90f3fc1652f7be9f818ed2882da8975a51b99fc2d77dd2727e')]), - ])), - ('dash_html_components', '1.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2c662e640528c890aaa0fa23d48e51c4d13ce69a97841d856ddcaaf2c6a47be3')]), - ])), - ('dash_table', '4.10.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1aa02180faef13434364286b60404d26164d1ce2779c765c9c52e6935991a4e9')]), - ])), - ('dash-bootstrap-components', '0.10.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '40500e692f59648dbc024b9e921a1e1410ee0bc97cfa963990e54e42523679b4')]), - ])), - ('dash_daq', '0.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a1d85b6799f7b885652fbc44aebdb58c41254616a8d350b943beeb42ade4256a')]), - ])), - ('dash_player', '0.0.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '46114910b497f35f1aa496ed8b9ff1457d07c96171227961b671ba4164c537a0')]), - ])), - ('dash_canvas', '0.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '72fcfb37e1c0f68c08f6fa6cf0b5be67ecc66fcfb5253231ffc450957b640b31')]), - ])), - ('dash_bio', '0.4.8', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a6f480b060488ab9be26129562a7bcb491ef40ae0ffae46f499fb7d5997de06c')]), - ])), - ('dash_cytoscape', '0.2.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0669c79c197e4b150a5db7a278d1c7acebc947f3f5cbad5274835ebb44f712cd')]), - ])), - ('ansi2html', '1.5.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
-        '96ae85ae7b26b7da674d87de2870ba4d1964bca733ae4614587080b6358c3ba9')]),
-    ])),
-    ('jupyter-dash', '0.4.0', dict(list(local_common_opts.items()) + [
-        ('checksums', [('sha256', 'eb5eb42ec8cb5e3388d41d895b5ef6e66812e3345cb271cc374318a1a589e687')]),
-    ])),
-    # more
-    ('fastcore', '1.3.6', dict(list(local_common_opts.items()) + [
-        ('checksums', [('sha256', '550c201f6a0f7001994898260588e3df310968165c43ecc4fa3062dd6eee5956')]),
-    ])),
-    ('fastscript', '1.0.0', dict(list(local_common_opts.items()) + [
-        ('checksums', [('sha256', '67d2315a508ffd0499af590fffaa63d276ce6eaff73ffbd60eb3315ba38d08fa')]),
-    ])),
-    # ('nbdev', '1.1.5', dict(list(local_common_opts.items()) + [
-    #     # nbdev < 2 needs nbconvert < 6, will change with nbdev >= 2
-    #     ('checksums', [('sha256', '0ce349625514e2865bbc023db597d791c45b572a7bbc8a49b320a327b9e7b721')]),
-    # ])),
-    ('PyJWT', '1.7.1', dict(list(local_common_opts.items()) + [
-        ('checksums', [('sha256', '8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96')]),
-        ('modulename', 'jwt'),
-    ])),
-    ('pyunicore', '0.9.5', dict(list(local_common_opts.items()) + [
-        ('checksums', [('sha256', '10c091812d90e86f1371fe1373dada86be6afac41da44b762f0f5f86f9ddc3ae')]),
-    ])),
-
-    # misc
-    # ('rpy2', '3.2.4', dict(list(local_common_opts.items()) + [
-    #     ('checksums', [('sha256', '3daf1a4b28c4e354ef989093f03b066908bf6e5082a6f4af72cc3fd928a28dc6')]),
-    # ])),
-]
-
-local_jupyter_config_path = 'etc/jupyter'
-local_jupyter_path = 'share/jupyter'
-local_jupyterlab_dir = 'share/jupyter/lab'
-
-modextrapaths = {
-    'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'],
-    # search path to find installable data files, such as kernelspecs and notebook extensions
-    'JUPYTER_PATH': [local_jupyter_path],
-    'JUPYTERLAB_DIR': [local_jupyterlab_dir],
-    # do NOT set JUPYTER_CONFIG_DIR: if not set, it will be ${HOME}/.jupyter, which is just right
-    'JUPYTER_CONFIG_PATH': [local_jupyter_config_path]  # config dir at install time.
-    # ATTENTION: not config dir at runtime, because this is fixed to {sys.prefix}/etc/jupyter/
-}
-
-modextravars = {
-    'JUPYTER': '%(installdir)s/bin/jupyter',
-    'MKL_THREADING_LAYER': 'GNU',  # https://github.com/Theano/Theano/issues/6568
-}
-
-# Ensure that the user-specific $HOME/.local/share/jupyter is the first entry in JUPYTER_PATH and JUPYTER_DATA_DIR
-# and that JUPYTER_CONFIG_PATH starts with $HOME/.jupyter
-# https://jupyter.readthedocs.io/en/latest/projects/jupyter-directories.html#envvar-JUPYTER_PATH
-modluafooter = """
-prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter"))
-prepend_path("JUPYTER_DATA_DIR", pathJoin(os.getenv("HOME"), ".local/share/jupyter"))
-prepend_path("JUPYTER_CONFIG_PATH", pathJoin(os.getenv("HOME"), ".jupyter"))
-"""
-
-postinstallcmds = [
-    # ensure we install in the correct directory !!!
-    'python3 -m venv %(installdir)s --system-site-packages',
-
-    'echo "#!/bin/bash" > %(builddir)s/env.sh',
-    'echo "source %(installdir)s/bin/activate" >> %(builddir)s/env.sh',
-    (
-        'echo "export PYTHONPATH='
-        '%(installdir)s/lib/python3.8/site-packages:${EBROOTPYTHON}/lib/python3.8/site-packages:${PYTHONPATH}"'
-        ' >> %(builddir)s/env.sh'
-    ),
-    # Jupyter Paths - http://jupyter.readthedocs.io/en/latest/projects/jupyter-directories.html
-    'echo "export JUPYTER=%(installdir)s/bin/jupyter" >> %(builddir)s/env.sh',
-    'echo "export JUPYTER_PATH=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_path,
-    'echo "export JUPYTERLAB_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyterlab_dir,
-    # Config dir at install time. ATTENTION: not config dir at runtime. This is picked up by JUPYTER_CONFIG_PATH
-    'echo "export JUPYTER_CONFIG_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_config_path,
-    # jupyter will use $JUPYTER_CONFIG_DIR with "--user"
-    'echo "export JUPYTER_DATA_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_path,
-    'echo "export PATH=%(installdir)s/bin:${PATH}" >> %(builddir)s/env.sh',
-
-    # NodeJS packages
-    'source %(builddir)s/env.sh && npm install -g phantomjs-prebuilt@2.1.16',  # req. export_png/export_svg of bokeh
-
-    # Jupyter Notebook Extensions
-    'source %(builddir)s/env.sh && jupyter contrib nbextension install --user',
-    'source %(builddir)s/env.sh && jupyter nbextensions_configurator enable --user',
-    'source %(builddir)s/env.sh && jupyter nbextension enable codefolding/main --user',
-
-    ('source %(builddir)s/env.sh && '
-     ' jupyter nbextension install widgetsnbextension --py --nbextensions=${JUPYTER_PATH}/nbextensions'),
-    'source %(builddir)s/env.sh && jupyter nbextension enable widgetsnbextension --py --user',
-
-    'source %(builddir)s/env.sh && jupyter nbextension enable ipyvuetify --py --user',
-
-    'source %(builddir)s/env.sh && jupyter nbextension install rise --py --nbextensions=${JUPYTER_PATH}/nbextensions',
-    'source %(builddir)s/env.sh && jupyter nbextension enable rise --py --user',
-
-    'source %(builddir)s/env.sh && jupyter nbextension enable appmode --py --user',
-    'source %(builddir)s/env.sh && jupyter serverextension enable appmode --py',
-
-    # JupyterLab Extensions
-    'source %(builddir)s/env.sh && jupyter labextension install @jupyter-widgets/jupyterlab-manager@2.0.0 --no-build',
-    'source %(builddir)s/env.sh && jupyter labextension install @jupyter-widgets/jupyterlab-sidecar@0.5.0 --no-build',
-
-    # 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/server-proxy@2.1.1 --no-build',
-    ('source %(builddir)s/env.sh && '
-     ' cd %(builddir)s/jupyterserverproxy/jupyter-server-proxy-1.5.2/jupyterlab-server-proxy/ && '
-     ' npm install && '  # install npm package dependencies in current directory
-     ' npm run build && '  # optional build step if using TypeScript, babel, etc.
- ' jupyter labextension install --no-build'), # install the current directory as an extension' - # 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/github@2.0.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/latex@2.0.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/git@0.23.3 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/toc@4.0.0 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-datawidgets@6.3.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-plotly@4.14.3 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-gitlab@2.0.0 --no-build', - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab_iframe@0.2.2 --no-build', - - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-lmod@0.8.1 --no-build', - # this might interfer with Xpra-icon: https://github.com/cmd-ntrf/jupyter-lmod/issues/30 - ('source %(builddir)s/env.sh && ' - ' cd %(builddir)s/jupyterlmod/jupyter-lmod-2.0.2/jupyterlab/ && ' - ' npm install && ' # install npm package dependencies incurrent directory - ' npm run build && ' # optional build step if using TypeScript, babel, etc. - ' jupyter labextension install --no-build'), # install the current directory as an extension - - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-nvdashboard@0.4.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-dash@0.4.0 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-topbar-extension@0.5.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-system-monitor@0.6.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-theme-toggle@0.5.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-control@1.1.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-favorites@2.0.0 --no-build', - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-slurm@2.0.0 --no-build', - - # store default in ../share/jupyter/lab/schemas/jupyterlab-favorites/favorites.json - # 'source %(builddir)s/env.sh && jupyter labextension install jupyterlab-tour@2.1.1 --no-build', - # tour-state on/off is saved in workspace-file in ~/.jupyter/lab/workspaces/ - - # respect the version lookup table at https://github.com/matplotlib/ipympl - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-matplotlib@0.7.4 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-leaflet@0.13.3 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-threejs@2.2.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jupyter-vuetify@1.6.1 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install nbdime-jupyterlab@2.0.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install dask-labextension@3.0.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install ipyvolume@0.6.0-alpha.5 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install plotlywidget@4.14.3 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install bqplot@0.5.22 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension 
install dicom-upload@0.2.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install jsfileupload@0.2.0 --no-build', - - # 'source %(builddir)s/env.sh && jupyter labextension install @jupyterlab/hdf5@0.5.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @ijmbarr/jupyterlab_spellchecker@0.2.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @pyviz/jupyterlab_pyviz@1.0.4 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @bokeh/jupyter_bokeh@2.0.4 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @jupyter-voila/jupyterlab-preview@1.1.0 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install @parente/jupyterlab-quickopen@0.5.0 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install @ryantam626/jupyterlab_code_formatter@1.3.8 --no-build', - ('source %(builddir)s/env.sh && ' - ' jupyter labextension install @krassowski/jupyterlab_go_to_definition@1.0.0 --no-build'), - 'source %(builddir)s/env.sh && jupyter labextension install @krassowski/jupyterlab-lsp@2.1.3 --no-build', - - 'source %(builddir)s/env.sh && jupyter labextension install pvlink@0.3.1 --no-build', - 'source %(builddir)s/env.sh && jupyter labextension install itkwidgets@0.32.0 --no-build', - - # build JupyterLab app directory for all previous installed extensions in one go - 'source %(builddir)s/env.sh && jupyter lab build --dev-build=False', # --minimize=False - - # jupyterlab server extensions - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyter_server_proxy --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyterlab_code_formatter --py', - # 'source %(builddir)s/env.sh && jupyter serverextension enable jupyterlab_sql --py', - # 'source %(builddir)s/env.sh && jupyter serverextension enable jupyterlab_iframe --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyterlab_git --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable jupyter_lsp --py', - 'source %(builddir)s/env.sh && jupyter serverextension enable dask_labextension --py', - - # configure jupyterlab extensions - - # Send2Trash - # disable - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - 'c.FileContentsManager.delete_to_trash = False\n' - 'EOF' - ), - - # GitLab-extension - # for security reasons access-token must be set in the server extension: - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - '# no username+password needed, if repo is public or we have the token for a specific URL\n' - '# c.GitLabConfig.access_token = "<API-TOKEN>"\n' - '# c.GitLabConfig.allow_client_side_access_token = False\n' - 'c.GitLabConfig.url = "https://gitlab.version.fz-juelich.de"\n' - 'c.GitLabConfig.validate_cert = True\n' - 'EOF' - ), - - # GitHub-extension - # for security reasons access-token must be set in the server extension: - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - '# no username+password needed, if repo is public or we have the token for a specific URL\n' - '# c.GitHubConfig.access_token = "<API-TOKEN>"\n' - '# c.GitHubConfig.allow_client_side_access_token = False\n' - 'c.GitHubConfig.url = "https://github.com"\n' - 'c.GitHubConfig.validate_cert = True\n' - 'EOF' - ), - - # iframe-extension - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - '# c.JupyterLabIFrame.iframes = [\'list\', \'of\', 
\'sites\']\n' - 'c.JupyterLabIFrame.welcome = "http://www.fz-juelich.de/jsc"\n' - 'EOF' - ), - - # define .ipynb_checkpoints permissions - ( - '{ cat >> %(installdir)s/etc/jupyter/jupyter_notebook_config.py; } << \'EOF\'\n' - 'c.FileCheckpoints.checkpoint_permissions = 0o664\n' - 'c.FileCheckpoints.restore_permissions = 0o644\n' - 'c.FileCheckpoints.checkpoint_dir_umask = 0o002\n' - 'EOF' - ), - - # modify the installation files, which would be overwritten if done before (must be last commands to run) - ( - 'cp %(builddir)s/jupyterlabgitlab/jupyterlab-gitlab-2.0.0/schema/drive.json' - ' %(installdir)s/share/jupyter/lab/schemas/jupyterlab-gitlab/drive.json' - ), - - # Add the overrides file - ( - '{ cat > %(builddir)s/file_jupyter-overrides_jsc.patch; } << \'EOF\'\n' - 'diff -Naur share.orig/jupyter/lab/settings/overrides.json share/jupyter/lab/settings/overrides.json\n' - '--- share.orig/jupyter/lab/settings/overrides.json 1970-01-01 01:00:00.000000000 +0100\n' - '+++ share/jupyter/lab/settings/overrides.json 2019-11-26 13:40:46.560731000 +0100\n' - '@@ -0,0 +1,8 @@\n' - '+{\n' - '+ "jupyterlab-gitlab:drive": {\n' - '+ "baseUrl": "https://gitlab.version.fz-juelich.de"\n' - '+ },\n' - '+ "@parente/jupyterlab-quickopen:plugin": {\n' - '+ "relativeSearch": true\n' - '+ }\n' - '+}\n' - 'EOF' - ), - 'patch -p0 -d %(installdir)s < %(builddir)s/file_jupyter-overrides_jsc.patch', - - # add webpage, which leads back to https://jupyter-jsc.fz-juelich.de - 'cp %%(builddir)s/jupyterlab/jupyterlab-%s/401.html %%(installdir)s/share/jupyter/lab/static/' % local_jlab_version, - - # ################################################### - # IMPORTANT: - # start JupyterLab once (for 60 seconds) to allow some cleanup at first start - # ################################################### - ( - 'source %(builddir)s/env.sh && ' - '{(jupyter lab --no-browser) & } && JLAB_PID=$! && ' - 'sleep 60 && ' - 'jupyter notebook list --json | grep $JLAB_PID | ' - 'awk \'{for(i=1;i<=NF;i++)if($i=="\\"port\\":")print $(i+1)}\' | sed \'s/,*$//g\' | ' - 'xargs -i jupyter notebook stop {}' - ), - - # Ensure Jupyter does not want to build anything on startup - # The build_config.json file is used to track the local directories that have been installed - # using jupyter labextension install <directory>, as well as core extensions that have been explicitly uninstalled. 
-    # 'if [ -e %(installdir)s/share/jupyter/lab/settings/build_config.json ]; then exit 1; fi ',
-    (
-        '{ cat > %(installdir)s/share/jupyter/lab/settings/build_config.json; } << \'EOF\'\n'
-        '{\n'
-        ' "local_extensions": {}\n'
-        '}\n'
-        'EOF'
-    ),
-
-    # Ensure we remove the virtual environment to avoid a wrong search path for Python packages
-    'rm -f %(installdir)s/pyvenv.cfg',
-    'rm -f %(installdir)s/bin/python',
-    'rm -f %(installdir)s/bin/python3',
-    'rm -f %(installdir)s/bin/activate',
-    'rm -f %(installdir)s/bin/activate*',
-    'rm -f %(installdir)s/bin/easy_install*',
-    'rm -f %(installdir)s/bin/pip*',
-
-    # Compile Python files to byte-code to speed up execution
-    # ERROR: returns with exit code 1, because some files cannot be compiled for different reasons
-    # ###################################################
-    # Disabling this step is possible, because the      #
-    # sanity check will force the compile of all        #
-    # python packages anyway                            #
-    # ###################################################
-    # 'source %(builddir)s/env.sh && python -m compileall %(installdir)s',
-
-    # ###################################################
-    # IMPORTANT: must be done manually after eb-install: #
-    # ###################################################
-    # 'chmod -R g-w %(installdir)s ',  # software-group must not modify the installation by accident
-    # 'chmod -R ugo-w %(installdir)s/share ',  # No one should add files/configs to the global share after install
-    # 'chmod -R ug-w ...../2020/software/Python/3.8.5-GCCcore-9.3.0/share ',  # Python module, too
-]
-
-# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module
-# full_sanity_check = True  # would result in sanity-errors about yaml,ipython_genutils,IPython,traitlets
-sanity_check_paths = {
-    'files': [],
-    'dirs': ['lib/python%(pyshortver)s/site-packages'],
-}
-
-moduleclass = 'tools'
diff --git a/Golden_Repo/j/Jupyter/jupyter_contrib_nbextensions-template_paths.patch b/Golden_Repo/j/Jupyter/jupyter_contrib_nbextensions-template_paths.patch
deleted file mode 100644
index c51dfd38ddbf859c8282aa48875224addaf60f38..0000000000000000000000000000000000000000
--- a/Golden_Repo/j/Jupyter/jupyter_contrib_nbextensions-template_paths.patch
+++ /dev/null
@@ -1,121 +0,0 @@
-diff -Naur jupyter_contrib_nbextensions.orig/CHANGELOG.md jupyter_contrib_nbextensions/CHANGELOG.md
---- jupyter_contrib_nbextensions.orig/CHANGELOG.md	2020-11-22 12:43:10.086824740 +0100
-+++ jupyter_contrib_nbextensions/CHANGELOG.md	2020-11-22 12:47:11.839564000 +0100
-@@ -21,6 +21,9 @@
- This is where each new PR to the project should add a summary of its changes,
- which makes it much easier to fill in each release's changelog :)
- 
-+- Replace `template_path` with `template_paths` [#1532](https://github.com/ipython-contrib/jupyter_contrib_nbextensions/pull/1532). Nbconvert 6.0 replaced `template_path` with `template_paths` (see https://nbconvert.readthedocs.io/en/latest/changelog.html#significant-changes). This change in Nbconvert 6.0 causes errors in jupyter_latex_envs and in jupyter_contrib_nbextensions (see [#1529](https://github.com/ipython-contrib/jupyter_contrib_nbextensions/issues/1529).
-+- Update `install_requires` list in `setup.py` with 'nbconvert >=6.0' -+ - 0.5.1 - ----- - -diff -Naur jupyter_contrib_nbextensions.orig/setup.py jupyter_contrib_nbextensions/setup.py ---- jupyter_contrib_nbextensions.orig/setup.py 2020-11-22 12:43:10.325780000 +0100 -+++ jupyter_contrib_nbextensions/setup.py 2020-11-22 12:47:11.842264000 +0100 -@@ -67,7 +67,7 @@ - 'jupyter_highlight_selected_word >=0.1.1', - 'jupyter_latex_envs >=1.3.8', - 'jupyter_nbextensions_configurator >=0.4.0', -- 'nbconvert >=4.2', -+ 'nbconvert >=6.0', - 'notebook >=4.0', - 'pyyaml', - 'tornado', -@@ -81,7 +81,7 @@ - 'pip', - 'requests', - ], -- 'test:python_version == "2.7"': [ -+ 'test:python_version == "3.8"': [ - 'mock', - ], - }, -diff -Naur jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/config_scripts/highlight_html_cfg.py jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/config_scripts/highlight_html_cfg.py ---- jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/config_scripts/highlight_html_cfg.py 2020-11-22 12:43:10.330209000 +0100 -+++ jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/config_scripts/highlight_html_cfg.py 2020-11-22 12:47:11.799365000 +0100 -@@ -7,7 +7,7 @@ - - c = get_config() # noqa - c.NbConvertApp.export_format = "html" --c.Exporter.template_path = [ -+c.Exporter.template_paths = [ - '.', - jupyter_contrib_nbextensions.nbconvert_support.templates_directory(), - os.path.join(jcnbe_dir, 'nbextensions', 'highlighter') -diff -Naur jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/config_scripts/highlight_latex_cfg.py jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/config_scripts/highlight_latex_cfg.py ---- jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/config_scripts/highlight_latex_cfg.py 2020-11-22 12:43:10.331124000 +0100 -+++ jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/config_scripts/highlight_latex_cfg.py 2020-11-22 12:47:11.801863000 +0100 -@@ -7,7 +7,7 @@ - - c = get_config() # noqa - c.NbConvertApp.export_format = "latex" --c.Exporter.template_path = [ -+c.Exporter.template_paths = [ - '.', - jupyter_contrib_nbextensions.nbconvert_support.templates_directory(), - os.path.join(jcnbe_dir, 'nbextensions', 'highlighter') -diff -Naur jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/install.py jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/install.py ---- jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/install.py 2020-11-22 12:43:10.332127000 +0100 -+++ jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/install.py 2020-11-22 12:47:11.804683000 +0100 -@@ -124,7 +124,7 @@ - if logger: - logger.info('-- Configuring nbconvert template path') - # our templates directory -- _update_config_list(config, 'Exporter.template_path', [ -+ _update_config_list(config, 'Exporter.template_paths', [ - '.', - jupyter_contrib_nbextensions.nbconvert_support.templates_directory(), - ], install) -diff -Naur jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/migrate.py jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/migrate.py ---- jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/migrate.py 2020-11-22 12:43:10.333126000 +0100 -+++ jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/migrate.py 2020-11-22 12:47:11.807881000 +0100 -@@ -128,7 +128,7 @@ - config = Config(cm.get(config_basename)) - if config and logger: - logger.info('- Removing old config values from 
{}'.format(config_path)) -- _update_config_list(config, 'Exporter.template_path', [ -+ _update_config_list(config, 'Exporter.template_paths', [ - '.', os.path.join(jupyter_data_dir(), 'templates'), - ], False) - _update_config_list(config, 'Exporter.preprocessors', [ -diff -Naur jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/nbconvert_support/exporter_inliner.py jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/nbconvert_support/exporter_inliner.py ---- jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/nbconvert_support/exporter_inliner.py 2020-11-22 12:43:10.337543000 +0100 -+++ jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/nbconvert_support/exporter_inliner.py 2020-11-22 12:47:11.811491000 +0100 -@@ -39,8 +39,8 @@ - templates_directory) - contrib_templates_dir = templates_directory() - -- template_path = c.TemplateExporter.setdefault('template_path', []) -- if contrib_templates_dir not in template_path: -- template_path.append(contrib_templates_dir) -+ template_paths = c.TemplateExporter.setdefault('template_paths', []) -+ if contrib_templates_dir not in template_paths: -+ template_paths.append(contrib_templates_dir) - - return c -diff -Naur jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/nbconvert_support/toc2.py jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/nbconvert_support/toc2.py ---- jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/nbconvert_support/toc2.py 2020-11-22 12:45:58.854592000 +0100 -+++ jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/nbconvert_support/toc2.py 2020-11-22 12:47:11.814530000 +0100 -@@ -52,7 +52,7 @@ - templates_directory) - c.merge(super(TocExporter, self).default_config) - -- c.TemplateExporter.template_path = [ -+ c.TemplateExporter.template_paths = [ - '.', - templates_directory(), - ] -diff -Naur jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/nbextensions/runtools/readme.md jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/nbextensions/runtools/readme.md ---- jupyter_contrib_nbextensions.orig/src/jupyter_contrib_nbextensions/nbextensions/runtools/readme.md 2020-11-22 12:43:10.871469000 +0100 -+++ jupyter_contrib_nbextensions/src/jupyter_contrib_nbextensions/nbextensions/runtools/readme.md 2020-11-22 12:47:11.816660000 +0100 -@@ -78,7 +78,7 @@ - ``` - - The template needs to be in a path where nbconvert can find it. This can be your local path or specified in --`jupyter_nbconvert_config` or `jupyter_notebook_config` as `c.Exporter.template_path`, see [Jupyter docs](https://jupyter-notebook.readthedocs.io/en/latest/config.html). -+`jupyter_nbconvert_config` or `jupyter_notebook_config` as `c.Exporter.template_paths`, see [Jupyter docs](https://jupyter-notebook.readthedocs.io/en/latest/config.html). - - For HTML export a template is provided as `nbextensions.tpl` in the `jupyter_contrib_nbextensions` templates directory. 
Alternatively you can create your own template: - ``` diff --git a/Golden_Repo/j/Jupyter/jupyter_latex_envs-template_paths.patch b/Golden_Repo/j/Jupyter/jupyter_latex_envs-template_paths.patch deleted file mode 100644 index 451f439ae2cb32668895a3c8b77a58896b8e8040..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/jupyter_latex_envs-template_paths.patch +++ /dev/null @@ -1,48 +0,0 @@ -diff -Naur jupyter_latex_envs.orig/src/latex_envs/latex_envs.py jupyter_latex_envs/src/latex_envs/latex_envs.py ---- jupyter_latex_envs.orig/src/latex_envs/latex_envs.py 2020-11-22 17:04:43.617982112 +0100 -+++ jupyter_latex_envs/src/latex_envs/latex_envs.py 2020-11-22 17:05:35.063217000 +0100 -@@ -301,12 +301,12 @@ - ) - c.merge(super(LenvsHTMLExporter, self).default_config) - if os.path.isdir(os.path.join(os.path.dirname(__file__), 'templates')): -- c.TemplateExporter.template_path = ['.', -+ c.TemplateExporter.template_paths = ['.', - os.path.join(os.path.dirname(__file__), 'templates')] - else: - from jupyter_contrib_nbextensions.nbconvert_support import ( - templates_directory) -- c.TemplateExporter.template_path = ['.', templates_directory()] -+ c.TemplateExporter.template_paths = ['.', templates_directory()] - - return c - -@@ -364,12 +364,12 @@ - ) - c.merge(super(LenvsSlidesExporter, self).default_config) - if os.path.isdir(os.path.join(os.path.dirname(__file__), 'templates')): -- c.TemplateExporter.template_path = ['.', -+ c.TemplateExporter.template_paths = ['.', - os.path.join(os.path.dirname(__file__), 'templates')] - else: - from jupyter_contrib_nbextensions.nbconvert_support import ( - templates_directory) -- c.TemplateExporter.template_path = ['.', templates_directory()] -+ c.TemplateExporter.template_paths = ['.', templates_directory()] - - return c - -@@ -483,12 +483,12 @@ - c.merge(super(LenvsLatexExporter, self).default_config) - - if os.path.isdir(os.path.join(os.path.dirname(__file__), 'templates')): -- c.TemplateExporter.template_path = ['.', -+ c.TemplateExporter.template_paths = ['.', - os.path.join(os.path.dirname(__file__), 'templates')] - else: - from jupyter_contrib_nbextensions.nbconvert_support import ( - templates_directory) -- c.TemplateExporter.template_path = ['.', templates_directory()] -+ c.TemplateExporter.template_paths = ['.', templates_directory()] - return c - - def tocrefrm(self, text): diff --git a/Golden_Repo/j/Jupyter/jupyterhub-1.1.0_logoutcookie-2.0.patch b/Golden_Repo/j/Jupyter/jupyterhub-1.1.0_logoutcookie-2.0.patch deleted file mode 100644 index dc420a9b2607b58f666925f4cb608a94298e7c15..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/jupyterhub-1.1.0_logoutcookie-2.0.patch +++ /dev/null @@ -1,123 +0,0 @@ -diff -Naur jupyterhub-1.1.0.orig/jupyterhub/services/auth.py jupyterhub-1.1.0/jupyterhub/services/auth.py ---- jupyterhub-1.1.0.orig/jupyterhub/services/auth.py 2019-12-03 10:13:48.000000000 +0100 -+++ jupyterhub-1.1.0/jupyterhub/services/auth.py 2020-12-23 08:24:38.868452725 +0100 -@@ -28,6 +28,7 @@ - from tornado.log import app_log - from tornado.web import HTTPError - from tornado.web import RequestHandler -+from traitlets import Bool - from traitlets import default - from traitlets import Dict - from traitlets import Instance -@@ -191,6 +192,29 @@ - """, - ).tag(config=True) - -+ session_id_required = Bool( -+ os.getenv('JUPYTERHUB_SESSION_ID_REQUIRED', 'false').lower()=="true", -+ help=""" -+ Blocks any requests, if there's no jupyterhub-session-id cookie. 
-+ """, -+ ).tag(config=True) -+ -+ session_id_required_user = Bool( -+ os.getenv('JUPYTERHUB_SESSION_ID_REQUIRED_USER', 'false').lower()=="true", -+ help=""" -+ Blocks any requests to /user, if there's no jupyterhub-session-id cookie. -+ """, -+ ).tag(config=True) -+ -+ last_session_id_validation = 0 -+ last_session_id_validation_result = None -+ -+ last_session_id_validation_cache_time = Integer( -+ int(os.getenv('JUPYTERHUB_SESSION_ID_CACHE_TIME', "10")), -+ help="""The maximum time (in seconds) to cache the Hub's responses for session_id verification. Default is 10. -+ """, -+ ).tag(config=True) -+ - hub_prefix = Unicode( - '/hub/', - help="""The URL prefix for the Hub itself. -@@ -461,6 +485,18 @@ - """ - return handler.get_cookie('jupyterhub-session-id', '') - -+ def validate_session_id(self, username, session_id): -+ if time.time() - self.last_session_id_validation > self.last_session_id_validation_cache_time: -+ self.last_session_id_validation = int(time.time()) -+ url = url_path_join( -+ self.api_url, "authorizations/sessionid", quote(username, safe=''), -+ ) -+ headers = {"sessionid": session_id} -+ self.last_session_id_validation_result = self._api_request( -+ 'GET', url, allow_404=True, headers=headers -+ ) -+ return self.last_session_id_validation_result -+ - def get_user(self, handler): - """Get the Hub user for a given tornado handler. - -@@ -484,16 +520,33 @@ - handler._cached_hub_user = user_model = None - session_id = self.get_session_id(handler) - -- # check token first -- token = self.get_token(handler) -- if token: -- user_model = self.user_for_token(token, session_id=session_id) -+ if self.session_id_required and not session_id: -+ app_log.info("Unauthorized access. Only users with a session id are allowed.") -+ return {'name': '<session_id_required>', 'kind': 'User'} -+ elif self.session_id_required_user and not session_id and handler.request.uri.startswith('/user'): -+ app_log.info("Unauthorized access. Only users with a session id are allowed to access /user.") -+ return {'name': '<session_id_required>', 'kind': 'User'} -+ elif self.session_id_required or ( self.session_id_required_user and handler.request.uri.startswith('/user')): -+ token = self.get_token(handler) -+ if token: -+ user_model = self.user_for_token(token) -+ if user_model: -+ handler._token_authenticated = True -+ if user_model is None: -+ user_model = self._get_user_cookie(handler) - if user_model: -- handler._token_authenticated = True -+ user_model = self.validate_session_id(user_model.get('name', ''), session_id) -+ else: -+ # check token first -+ token = self.get_token(handler) -+ if token: -+ user_model = self.user_for_token(token, session_id=session_id) -+ if user_model: -+ handler._token_authenticated = True - -- # no token, check cookie -- if user_model is None: -- user_model = self._get_user_cookie(handler) -+ # no token, check cookie -+ if user_model is None: -+ user_model = self._get_user_cookie(handler) - - # cache result - handler._cached_hub_user = user_model -@@ -904,10 +957,16 @@ - # tries to redirect to login URL, 403 will be raised instead. - # This is not the best, but avoids problems that can be caused - # when get_current_user is allowed to raise. -- def raise_on_redirect(*args, **kwargs): -- raise HTTPError( -- 403, "{kind} {name} is not allowed.".format(**user_model) -- ) -+ if user_model.get('name', '') == '<session_id_required>': -+ def raise_on_redirect(*args, **kwargs): -+ raise HTTPError( -+ 401, "Please login to proceed." 
-+ ) -+ else: -+ def raise_on_redirect(*args, **kwargs): -+ raise HTTPError( -+ 403, "{kind} {name} is not allowed.".format(**user_model) -+ ) - - self.redirect = raise_on_redirect - return diff --git a/Golden_Repo/j/Jupyter/jupyterlab-gitlab-2.0.0_jsc.patch b/Golden_Repo/j/Jupyter/jupyterlab-gitlab-2.0.0_jsc.patch deleted file mode 100644 index a382b73c04e2bd356a7a1ce4813ab80d7cf9591d..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/jupyterlab-gitlab-2.0.0_jsc.patch +++ /dev/null @@ -1,23 +0,0 @@ -diff ---- jupyterlab-gitlab-2.0.0.orig/schema/drive.json 2019-05-02 08:22:45.219143000 +0200 -+++ jupyterlab-gitlab-2.0.0/schema/drive.json 2019-05-02 08:25:15.392631000 +0200 -@@ -6,8 +6,8 @@ - "properties": { - "baseUrl": { - "type": "string", -- "title": "The GitLab Base URL", -- "default": "https://gitlab.com" -+ "title": "JSC GitLab Base URL", -+ "default": "https://gitlab.version.fz-juelich.de" - }, - "accessToken": { - "type": "string", -@@ -18,7 +18,7 @@ - "defaultRepo": { - "type": "string", - "title": "Default Repository", -- "default": "" -+ "default": "jupyter4jsc/j4j_notebooks" - } - }, - "type": "object" diff --git a/Golden_Repo/j/Jupyter/jupyterlab_github-2.0.0_jsc.patch b/Golden_Repo/j/Jupyter/jupyterlab_github-2.0.0_jsc.patch deleted file mode 100644 index ebee87027b2ee62abbf2c08f58506edd7b898f35..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/jupyterlab_github-2.0.0_jsc.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff ---- jupyterlab-github-2.0.0.orig/schema/drive.json 2018-10-10 21:14:53.986184891 +0200 -+++ jupyterlab_github-2.0.0/schema/drive.json 2018-10-11 00:28:34.846777384 +0200 -@@ -18,7 +18,7 @@ - "defaultRepo": { - "type": "string", - "title": "Default Repository", -- "default": "" -+ "default": "FZJ-JSC/jupyter-jsc-notebooks" - } - }, - "type": "object" - diff --git a/Golden_Repo/j/Jupyter/jupyterlmod-packagejson.patch b/Golden_Repo/j/Jupyter/jupyterlmod-packagejson.patch deleted file mode 100644 index 0d166fec22bceb14c1f704b3ee57d04831efe8c8..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/jupyterlmod-packagejson.patch +++ /dev/null @@ -1,17 +0,0 @@ -diff -Naur jupyter-lmod-2.0.2.orig/jupyterlab/package.json jupyter-lmod-2.0.2/jupyterlab/package.json ---- jupyter-lmod-2.0.2.orig/jupyterlab/package.json 2020-11-25 16:19:57.000000000 +0100 -+++ jupyter-lmod-2.0.2/jupyterlab/package.json 2020-12-29 16:50:58.803504000 +0100 -@@ -31,10 +31,12 @@ - "prepare": "npm run clean && npm run build" - }, - "dependencies": { -- "@jupyterlab/application": ">=2.0.0", -+ "@jupyterlab/application": "^2.0.0", -+ "@jupyterlab/apputils": "^2.1.0", - "@jupyterlab/launcher": "^2.1.0" - }, - "devDependencies": { -+ "@types/react": "^16.9.16", - "rimraf": "~2.6.2", - "typescript": "~3.7.3" - }, diff --git a/Golden_Repo/j/Jupyter/jupyterlmod-urlfile.patch b/Golden_Repo/j/Jupyter/jupyterlmod-urlfile.patch deleted file mode 100644 index 0b56aa3e3705f48801d287a08eaad4505016c81f..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/jupyterlmod-urlfile.patch +++ /dev/null @@ -1,37 +0,0 @@ -diff -Naur jupyter-lmod-2.0.1/jupyterlab/src/index.ts jupyter-lmod-2.0.1_devel/jupyterlab/src/index.ts ---- jupyter-lmod-2.0.1/jupyterlab/src/index.ts 2020-10-05 17:08:01.000000000 +0200 -+++ jupyter-lmod-2.0.1_devel/jupyterlab/src/index.ts 2020-11-25 11:55:24.000000000 +0100 -@@ -244,7 +244,13 @@ - const namespace = 'server-proxy'; - const command = namespace + ':' + 'open'; - for (let server_process of data.server_processes) { -- const 
url = PageConfig.getBaseUrl() + server_process.name + '/'; -+ -+ let urlfile = ''; -+ if (server_process.launcher_entry.urlfile) { -+ urlfile = server_process.launcher_entry.urlfile; -+ } -+ const url = PageConfig.getBaseUrl() + server_process.name + '/' + urlfile; -+ - const title = server_process.launcher_entry.title; - const newBrowserTab = server_process.new_browser_tab; - const id = namespace + ':' + server_process.name; -diff -Naur jupyter-lmod-2.0.1/jupyterlmod/static/main.js jupyter-lmod-2.0.1_devel/jupyterlmod/static/main.js ---- jupyter-lmod-2.0.1/jupyterlmod/static/main.js 2020-10-05 17:08:01.000000000 +0200 -+++ jupyter-lmod-2.0.1_devel/jupyterlmod/static/main.js 2020-11-25 11:55:24.000000000 +0100 -@@ -316,10 +316,14 @@ - .addClass('new-' + server_process.name); - - /* create our list item's link */ -+ let urlfile = ''; -+ if (server_process.launcher_entry.urlfile) { -+ urlfile = server_process.launcher_entry.urlfile; -+ } - let entry_link = $('<a>') - .attr('role', 'menuitem') - .attr('tabindex', '-1') -- .attr('href', base_url + server_process.name + '/') -+ .attr('href', base_url + server_process.name + '/' + urlfile) - .attr('target', '_blank') - .text(server_process.launcher_entry.title); - diff --git a/Golden_Repo/j/Jupyter/jupyterserverproxy-urlfile.patch b/Golden_Repo/j/Jupyter/jupyterserverproxy-urlfile.patch deleted file mode 100644 index de883561447a477f8ad6286b61d0b93c07d0265a..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/jupyterserverproxy-urlfile.patch +++ /dev/null @@ -1,82 +0,0 @@ -diff -Naur jupyter-server-proxy.orig/jupyterlab-server-proxy/src/index.ts jupyter-server-proxy/jupyterlab-server-proxy/src/index.ts ---- jupyter-server-proxy.orig/jupyterlab-server-proxy/src/index.ts 2020-11-19 06:40:34.521630000 +0100 -+++ jupyter-server-proxy/jupyterlab-server-proxy/src/index.ts 2020-11-19 06:41:35.023864000 +0100 -@@ -79,8 +79,11 @@ - if (!server_process.launcher_entry.enabled) { - continue; - } -- -- const url = PageConfig.getBaseUrl() + server_process.name + '/'; -+ var urlfile = ''; -+ if (server_process.launcher_entry.urlfile) { -+ urlfile = server_process.launcher_entry.urlfile; -+ } -+ const url = PageConfig.getBaseUrl() + server_process.name + '/' + urlfile; - const title = server_process.launcher_entry.title; - const newBrowserTab = server_process.new_browser_tab; - const id = namespace + ':' + server_process.name; -diff -Naur jupyter-server-proxy.orig/jupyter_server_proxy/api.py jupyter-server-proxy/jupyter_server_proxy/api.py ---- jupyter-server-proxy.orig/jupyter_server_proxy/api.py 2020-11-19 06:40:34.516286000 +0100 -+++ jupyter-server-proxy/jupyter_server_proxy/api.py 2020-11-19 06:41:35.015090000 +0100 -@@ -19,7 +19,8 @@ - 'name': sp.name, - 'launcher_entry': { - 'enabled': sp.launcher_entry.enabled, -- 'title': sp.launcher_entry.title -+ 'title': sp.launcher_entry.title, -+ 'urlfile': sp.launcher_entry.urlfile, - }, - 'new_browser_tab' : sp.new_browser_tab - } -diff -Naur jupyter-server-proxy.orig/jupyter_server_proxy/config.py jupyter-server-proxy/jupyter_server_proxy/config.py ---- jupyter-server-proxy.orig/jupyter_server_proxy/config.py 2020-11-19 06:40:34.516753000 +0100 -+++ jupyter-server-proxy/jupyter_server_proxy/config.py 2020-11-19 06:41:35.016181000 +0100 -@@ -99,7 +99,7 @@ - )) - return handlers - --LauncherEntry = namedtuple('LauncherEntry', ['enabled', 'icon_path', 'title']) -+LauncherEntry = namedtuple('LauncherEntry', ['enabled', 'icon_path', 'title', 'urlfile']) - ServerProcess = namedtuple('ServerProcess', 
[ - 'name', 'command', 'environment', 'timeout', 'absolute_url', 'port', 'mappath', 'launcher_entry', 'new_browser_tab']) - -@@ -116,7 +116,8 @@ - launcher_entry=LauncherEntry( - enabled=le.get('enabled', True), - icon_path=le.get('icon_path'), -- title=le.get('title', name) -+ title=le.get('title', name), -+ urlfile=le.get('urlfile', ''), - ), - new_browser_tab=server_process_config.get('new_browser_tab', True) - ) -@@ -175,6 +176,10 @@ - title - Title to be used for the launcher entry. Defaults to the name of the server if missing. - -+ urlfile -+ URL file name and URL parameters to be added to the base URL of the launcher entry. -+ Default is none. -+ - new_browser_tab - Set to True (default) to make the proxied server interface opened as a new browser tab. Set to False - to have it open a new JupyterLab tab. This has no effect in classic notebook. -diff -Naur jupyter-server-proxy.orig/jupyter_server_proxy/static/tree.js jupyter-server-proxy/jupyter_server_proxy/static/tree.js ---- jupyter-server-proxy.orig/jupyter_server_proxy/static/tree.js 2020-11-19 06:40:34.519694000 +0100 -+++ jupyter-server-proxy/jupyter_server_proxy/static/tree.js 2020-11-19 06:41:35.020051000 +0100 -@@ -33,10 +33,14 @@ - .addClass('new-' + server_process.name); - - /* create our list item's link */ -+ var urlfile = ''; -+ if (server_process.launcher_entry.urlfile) { -+ urlfile = server_process.launcher_entry.urlfile; -+ } - var $entry_link = $('<a>') - .attr('role', 'menuitem') - .attr('tabindex', '-1') -- .attr('href', base_url + server_process.name + '/') -+ .attr('href', base_url + server_process.name + '/' + urlfile) - .attr('target', '_blank') - .text(server_process.launcher_entry.title); - diff --git a/Golden_Repo/j/Jupyter/notebook-6.0.3_jsc.patch b/Golden_Repo/j/Jupyter/notebook-6.0.3_jsc.patch deleted file mode 100644 index d7a9bb20d7d6eea484462bd8bf68c8e018546b4e..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/Jupyter/notebook-6.0.3_jsc.patch +++ /dev/null @@ -1,143 +0,0 @@ -diff ---- notebook-6.0.3.orig/notebook/services/contents/filecheckpoints.py 2020-04-28 06:26:51.384296390 +0200 -+++ notebook-6.0.3/notebook/services/contents/filecheckpoints.py 2020-04-28 06:30:55.139342947 +0200 -@@ -14,7 +14,7 @@ - - from jupyter_core.utils import ensure_dir_exists - from ipython_genutils.py3compat import getcwd --from traitlets import Unicode -+from traitlets import Unicode, Integer - - from notebook import _tz as tz - -@@ -28,6 +28,35 @@ - you want file-based checkpoints with another ContentsManager. - """ - -+ checkpoint_permissions = Integer( -+ 0o644, -+ config=True, -+ help="""The permission of your checkpoint files. -+ -+ By default, it is 0o644 -+ """, -+ ) -+ -+ restore_permissions = Integer( -+ 0o644, -+ config=True, -+ help="""The permission of a restored checkpoint. -+ -+ By default, it is 0o644 -+ """, -+ ) -+ -+ checkpoint_dir_umask = Integer( -+ 0o022, -+ config=True, -+ help="""The umask for the checkpoint directory. -+ -+ Keep in mind that it depends on your system umask, too. 
-+ -+ By default, it is 0o022 -+ """, -+ ) -+ - checkpoint_dir = Unicode( - '.ipynb_checkpoints', - config=True, -@@ -50,21 +79,29 @@ - # ContentsManager-dependent checkpoint API - def create_checkpoint(self, contents_mgr, path): - """Create a checkpoint.""" -+ original_umask = os.umask(self.checkpoint_dir_umask) - checkpoint_id = u'checkpoint' - src_path = contents_mgr._get_os_path(path) - dest_path = self.checkpoint_path(checkpoint_id, path) - self._copy(src_path, dest_path) -- return self.checkpoint_model(checkpoint_id, dest_path) -+ os.chmod(dest_path, self.checkpoint_permissions) -+ ret = self.checkpoint_model(checkpoint_id, dest_path) -+ os.umask(original_umask) -+ return ret - - def restore_checkpoint(self, contents_mgr, checkpoint_id, path): - """Restore a checkpoint.""" -+ original_umask = os.umask(self.checkpoint_dir_umask) - src_path = self.checkpoint_path(checkpoint_id, path) - dest_path = contents_mgr._get_os_path(path) - self._copy(src_path, dest_path) -+ os.chmod(dest_path, self.restore_permissions) -+ os.umask(original_umask) - - # ContentsManager-independent checkpoint API - def rename_checkpoint(self, checkpoint_id, old_path, new_path): - """Rename a checkpoint from old_path to new_path.""" -+ original_umask = os.umask(self.checkpoint_dir_umask) - old_cp_path = self.checkpoint_path(checkpoint_id, old_path) - new_cp_path = self.checkpoint_path(checkpoint_id, new_path) - if os.path.isfile(old_cp_path): -@@ -75,9 +112,11 @@ - ) - with self.perm_to_403(): - shutil.move(old_cp_path, new_cp_path) -+ os.umask(original_umask) - - def delete_checkpoint(self, checkpoint_id, path): - """delete a file's checkpoint""" -+ original_umask = os.umask(self.checkpoint_dir_umask) - path = path.strip('/') - cp_path = self.checkpoint_path(checkpoint_id, path) - if not os.path.isfile(cp_path): -@@ -86,23 +125,29 @@ - self.log.debug("unlinking %s", cp_path) - with self.perm_to_403(): - os.unlink(cp_path) -+ os.umask(original_umask) - - def list_checkpoints(self, path): - """list the checkpoints for a given file - - This contents manager currently only supports one checkpoint per file. 
- """ -+ original_umask = os.umask(self.checkpoint_dir_umask) - path = path.strip('/') - checkpoint_id = "checkpoint" - os_path = self.checkpoint_path(checkpoint_id, path) - if not os.path.isfile(os_path): -+ os.umask(original_umask) - return [] - else: -- return [self.checkpoint_model(checkpoint_id, os_path)] -+ ret = self.checkpoint_model(checkpoint_id, os_path) -+ os.umask(original_umask) -+ return [ret] - - # Checkpoint-related utilities - def checkpoint_path(self, checkpoint_id, path): - """find the path to a checkpoint""" -+ original_umask = os.umask(self.checkpoint_dir_umask) - path = path.strip('/') - parent, name = ('/' + path).rsplit('/', 1) - parent = parent.strip('/') -@@ -117,16 +162,19 @@ - with self.perm_to_403(): - ensure_dir_exists(cp_dir) - cp_path = os.path.join(cp_dir, filename) -+ os.umask(original_umask) - return cp_path - - def checkpoint_model(self, checkpoint_id, os_path): - """construct the info dict for a given checkpoint""" -+ original_umask = os.umask(self.checkpoint_dir_umask) - stats = os.stat(os_path) - last_modified = tz.utcfromtimestamp(stats.st_mtime) - info = dict( - id=checkpoint_id, - last_modified=last_modified, - ) -+ os.umask(original_umask) - return info - - # Error Handling - diff --git a/Golden_Repo/j/JupyterCollection/JupyterCollection-2020.2.5-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/j/JupyterCollection/JupyterCollection-2020.2.5-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 3c95d6f138d593c2b7672401e04b22170c4fc4d9..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterCollection/JupyterCollection-2020.2.5-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterCollection' -version = '2020.2.5' - -local_pysuffix = '-Python-3.8.5' - -homepage = 'http://www.jupyter.org' -description = """ -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -dependencies = [ - ('JupyterKernel-Bash', '0.7.1', '-' + version), - ('JupyterKernel-Cling', '0.7', '-' + version), - ('JupyterKernel-JavaScript', '5.2.0', '-' + version), - ('JupyterKernel-Julia', '1.5.2', '-' + version), - ('JupyterKernel-Octave', '6.1.0', '-' + version), - ('JupyterKernel-PyParaView', '5.8.1', '-' + version), - # ('JupyterKernel-PyQuantum', '1.1', '-' + version), - ('JupyterKernel-R', '4.0.2', '-' + version), - ('JupyterKernel-Ruby', '2.7.1', '-' + version), - ('JupyterProxy-XpraHTML5', '0.3.0', '-' + version), - ('Jupyter', version, local_pysuffix), -] - -skipsteps = ['configure', 'build', 'install', 'sanity_check'] - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterCollection/JupyterCollection-2020.2.6-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/j/JupyterCollection/JupyterCollection-2020.2.6-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index af43eeda85e6b9e013085fcb6514f655eb62b2b6..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterCollection/JupyterCollection-2020.2.6-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterCollection' -version = '2020.2.6' - -local_pysuffix = '-Python-3.8.5' - -homepage = 'http://www.jupyter.org' -description = """ -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -dependencies = [ - ('JupyterProxy-XpraHTML5', '0.3.0', '-' + version), - # ('JupyterProxy-Matlab', '0.1.0', '-' + version), - ('JupyterKernel-Bash', '0.7.1', '-' + version), - ('JupyterKernel-Cling', '0.9', '-' + version), - ('JupyterKernel-JavaScript', '5.2.0', '-' + version), - ('JupyterKernel-Julia', '1.6.1', '-' + version), - ('JupyterKernel-Octave', '6.1.0', '-' + version), - ('JupyterKernel-PyParaView', '5.8.1', '-' + version), - ('JupyterKernel-PyQuantum', '1.1', '-' + version), - ('JupyterKernel-PyDeepLearning', '1.0', '-' + version), - ('JupyterKernel-R', '4.0.2', '-' + version), - ('JupyterKernel-Ruby', '2.7.1', '-' + version), - ('Jupyter', version, local_pysuffix), -] - -skipsteps = ['configure', 'build', 'install', 'sanity_check'] - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Bash/JupyterKernel-Bash-0.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-Bash/JupyterKernel-Bash-0.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index 65d3c8c014f31e4058e31c0235782028857441e8..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Bash/JupyterKernel-Bash-0.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,98 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Bash' -version = '0.7.1' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/takluyver/bash_kernel' -description = """ -Native Bash kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, -} - -exts_list = [ - # 0.7.2 fails with BackendUnavailable. Might be fixable --no-use-pep517 - ('bash_kernel', '0.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '29f895819e076e3f225e37034b70b5265a559e2964e020c942024f51ea6153e8')]), - ('use_pip', True), - ])), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # Jupyter Kernel: Bash - https://github.com/takluyver/bash_kernel - # installs bash_kernel in $JUPYTER_DATA_DIR/kernels - 'source %(builddir)s/env.sh && ${EBROOTPYTHON}/bin/python3 -m bash_kernel.install --user', - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm -r %(installdir)s/bin', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/bash/kernel.json', - ], - 'dirs': [ - 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/bash/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Bash/JupyterKernel-Bash-0.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-Bash/JupyterKernel-Bash-0.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index b5b38519821c620b7458952b7791f196a48f98ff..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Bash/JupyterKernel-Bash-0.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,98 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Bash' -version = '0.7.1' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/takluyver/bash_kernel' -description = """ -Native Bash kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, -} - -exts_list = [ - # 0.7.2 fails with BackendUnavailable. Might be fixable --no-use-pep517 - ('bash_kernel', '0.7.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '29f895819e076e3f225e37034b70b5265a559e2964e020c942024f51ea6153e8')]), - ('use_pip', True), - ])), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # Jupyter Kernel: Bash - https://github.com/takluyver/bash_kernel - # installs bash_kernel in $JUPYTER_DATA_DIR/kernels - 'source %(builddir)s/env.sh && ${EBROOTPYTHON}/bin/python3 -m bash_kernel.install --user', - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm -r %(installdir)s/bin', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/bash/kernel.json', - ], - 'dirs': [ - 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/bash/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.7-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.7-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index 8de48fc00e6eafc78dd98cda09d2bc72fde8d6cb..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.7-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,93 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Cling' -version = '0.7' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/root-project/cling' -description = """ -Native C kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - ('Cling', version), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # Jupyter Kernel: Cling (C++) - 'source %(builddir)s/env.sh && pip3 install ${EBROOTCLING}/share/cling/Jupyter/kernel', - ( - 'source %(builddir)s/env.sh && ' - ' jupyter-kernelspec install --prefix=%(installdir)s ${EBROOTCLING}/share/cling/Jupyter/kernel/cling-cpp17' - ), - - # correct shebang to correct python binary - ( - 'source %(builddir)s/env.sh && ' - ' abs2python="#! ${EBROOTPYTHON}/bin/python" && ' - ' sed "1s@^.*@$abs2python@g" -i %(installdir)s/bin/jupyter-cling-kernel' - ), - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm %(installdir)s/bin/python', - 'rm %(installdir)s/bin/python3', - 'rm %(installdir)s/bin/activate', - 'rm %(installdir)s/bin/activate*', - 'rm %(installdir)s/bin/easy_install*', - 'rm %(installdir)s/bin/pip*', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/cling-cpp17/kernel.json', - ], - 'dirs': [ - # 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/cling-cpp17/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.7-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.7-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index d0fd550e250dc3d550ccb2243e2bbed5cc3e467b..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.7-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,93 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Cling' -version = '0.7' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/root-project/cling' -description = """ -Native C kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - ('Cling', version), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # Jupyter Kernel: Cling (C++) - 'source %(builddir)s/env.sh && pip3 install ${EBROOTCLING}/share/cling/Jupyter/kernel', - ( - 'source %(builddir)s/env.sh && ' - ' jupyter-kernelspec install --prefix=%(installdir)s ${EBROOTCLING}/share/cling/Jupyter/kernel/cling-cpp17' - ), - - # correct shebang to correct python binary - ( - 'source %(builddir)s/env.sh && ' - ' abs2python="#! ${EBROOTPYTHON}/bin/python" && ' - ' sed "1s@^.*@$abs2python@g" -i %(installdir)s/bin/jupyter-cling-kernel' - ), - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm %(installdir)s/bin/python', - 'rm %(installdir)s/bin/python3', - 'rm %(installdir)s/bin/activate', - 'rm %(installdir)s/bin/activate*', - 'rm %(installdir)s/bin/easy_install*', - 'rm %(installdir)s/bin/pip*', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/cling-cpp17/kernel.json', - ], - 'dirs': [ - # 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/cling-cpp17/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.9-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.9-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 93871154f077c1dcea9ce622c4df04adb742eae0..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Cling/JupyterKernel-Cling-0.9-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,93 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Cling' -version = '0.9' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/root-project/cling' -description = """ -Native C kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - ('Cling', version), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # Jupyter Kernel: Cling (C++) - ( - 'source %(builddir)s/env.sh && ' - ' pip3 install ${EBROOTCLING}/share/cling/Jupyter/kernel && ' - 'jupyter-kernelspec install --prefix=%(installdir)s ${EBROOTCLING}/share/cling/Jupyter/kernel/cling-cpp17' - ), - - # correct shebang to correct python binary - ( - 'source %(builddir)s/env.sh && ' - ' abs2python="#! ${EBROOTPYTHON}/bin/python" && ' - ' sed "1s@^.*@$abs2python@g" -i %(installdir)s/bin/jupyter-cling-kernel' - ), - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm %(installdir)s/bin/python', - 'rm %(installdir)s/bin/python3', - 'rm %(installdir)s/bin/activate', - 'rm %(installdir)s/bin/activate*', - 'rm %(installdir)s/bin/easy_install*', - 'rm %(installdir)s/bin/pip*', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/cling-cpp17/kernel.json', - ], - 'dirs': [ - # 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/cling-cpp17/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-JavaScript/JupyterKernel-JavaScript-5.2.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-JavaScript/JupyterKernel-JavaScript-5.2.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index 63f3f746f56cbc0dc82f569bf0ad6eff20181df8..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-JavaScript/JupyterKernel-JavaScript-5.2.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,71 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-JavaScript' -version = '5.2.0' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-3.8.5' - -homepage = 'https://www.npmjs.com/package/ijavascript' -description = """ -Native JavaScript kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'PATH': ['bin'], - 'NODE_PATH': ['lib/node_modules'], # npm´s search path to extra modules - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'export PATH=%%(installdir)s/bin:${PATH}\n' - 'export NODE_PATH=%%(installdir)s/lib/node_modules\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - 'source %(builddir)s/env.sh && npm install ijavascript@5.2.0 -g --prefix %(installdir)s', - # installs ijavascript in $JUPYTER_DATA_DIR/kernels - 'source %(builddir)s/env.sh && ijsinstall --install=local', - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/javascript/kernel.json', - ], - 'dirs': [ - 'lib/node_modules/', - 'share/jupyter/kernels/javascript/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-JavaScript/JupyterKernel-JavaScript-5.2.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-JavaScript/JupyterKernel-JavaScript-5.2.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 93841d50ef45ff696936b1227f04cc49c4ba1ba1..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-JavaScript/JupyterKernel-JavaScript-5.2.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,70 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-JavaScript' -version = '5.2.0' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-3.8.5' - -homepage = 'https://www.npmjs.com/package/ijavascript' -description = """ -Native JavaScript kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'NODE_PATH': ['lib/node_modules'], # npm´s search path to extra modules - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'export PATH=%%(installdir)s/bin:${PATH}\n' - 'export NODE_PATH=%%(installdir)s/lib/node_modules\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - 'source %(builddir)s/env.sh && npm install ijavascript@5.2.0 -g --prefix %(installdir)s', - # installs ijavascript in $JUPYTER_DATA_DIR/kernels - 'source %(builddir)s/env.sh && ijsinstall --install=local', - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/javascript/kernel.json', - ], - 'dirs': [ - 'lib/node_modules/', - 'share/jupyter/kernels/javascript/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index 735b3cd4ce2e2a3e87d92493c03d91c7d5347ef9..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,117 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Julia' -version = '1.5.2' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/IRkernel/IRkernel' -description = """ -Native R kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - ('Julia', version), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, -} - -exts_list = [ - ('julia', '0.5.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'b13207125709fdba069a25c4e54ff366f0bc308807b2aa0f3b66924101799c58')]), - ('use_pip', True), - ])), -] - -local_jupyter_path = 'share/jupyter' -local_julia_depot_path = "%(installdir)s/share/julia/site/" # for Julia packages needed for Jupyter - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -modextravars = { - 'JULIA_DEPOT_PATH': local_julia_depot_path, -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - '' - 'export JULIA_DEPOT_PATH=%s\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_julia_depot_path, local_jupyter_path), - - # installs ijulia in JULIA_DEPOT_PATH and kernel in $JUPYTER_DATA_DIR/kernels - 'source %(builddir)s/env.sh && julia -e \'using Pkg; Pkg.add("IJulia"); Pkg.build("IJulia")\'', - - # to trigger the precompilation - 'source %(builddir)s/env.sh && julia -e \'using IJulia\'', - - # adjust permissions of precompiled files - 'for i in $(find %s); do chmod +r $i; done' % local_julia_depot_path, - - # configure Python<->Julia bridge (of python package julia) - 'source %(builddir)s/env.sh && python -c "import julia; julia.install()"', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm %(installdir)s/bin/python', - 'rm %(installdir)s/bin/python3', - 'rm %(installdir)s/bin/activate', - 'rm %(installdir)s/bin/activate*', - 'rm %(installdir)s/bin/easy_install*', - 'rm %(installdir)s/bin/pip*', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/julia-%(version_major_minor)s/kernel.json', - ], - 'dirs': [ - 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/julia-%(version_major_minor)s/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index e8addb10368bb9ab9dd1dc9f0864ca2755bfbbeb..0000000000000000000000000000000000000000 --- 
a/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.5.2-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,117 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Julia' -version = '1.5.2' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/JuliaLang/IJulia.jl' -description = """ -Native Julia kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - ('Julia', version), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, -} - -exts_list = [ - ('julia', '0.5.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '378d0377f75bb0e3bfc4cce19a56d3bf5a9a7be38e370e3a7cf3359bf4cd0378')]), - ('use_pip', True), - ])), -] - -local_jupyter_path = 'share/jupyter' -local_julia_depot_path = "%(installdir)s/share/julia/site/" # for Julia packages needed for Jupyter - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -modextravars = { - 'JULIA_DEPOT_PATH': local_julia_depot_path, -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! 
- 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - '' - 'export JULIA_DEPOT_PATH=%s\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_julia_depot_path, local_jupyter_path), - - # installs ijulia in JULIA_DEPOT_PATH and kernel in $JUPYTER_DATA_DIR/kernels - 'source %(builddir)s/env.sh && julia -e \'using Pkg; Pkg.add("IJulia"); Pkg.build("IJulia")\'', - - # to trigger the precompilation - 'source %(builddir)s/env.sh && julia -e \'using IJulia\'', - - # adjust permissions of precompiled files - 'for i in $(find %s); do chmod +r $i; done' % local_julia_depot_path, - - # configure Python<->Julia bridge (of python package julia) - 'source %(builddir)s/env.sh && python -c "import julia; julia.install()"', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm %(installdir)s/bin/python', - 'rm %(installdir)s/bin/python3', - 'rm %(installdir)s/bin/activate', - 'rm %(installdir)s/bin/activate*', - 'rm %(installdir)s/bin/easy_install*', - 'rm %(installdir)s/bin/pip*', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/julia-%(version_major_minor)s/kernel.json', - ], - 'dirs': [ - 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/julia-%(version_major_minor)s/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.6.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.6.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 040021373b0b226c8ee207dc39278e82d48e5b4f..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Julia/JupyterKernel-Julia-1.6.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,119 +0,0 @@ -easyblock = 'JuliaBundle' - -name = 'JupyterKernel-Julia' -version = '1.6.1' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/JuliaLang/IJulia.jl' -description = """ -Native Julia kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Julia', version), -] - -components = [ - ('julia', '0.5.6', { - 'easyblock': 'PythonPackage', - 'req_py_majver': '3', - 'req_py_minver': '0', - 'use_pip': True, - 'sources': ['v%(version)s.tar.gz'], - 'source_urls': ['https://github.com/JuliaPy/pyjulia/archive/'], - 'checksums': [('sha256', 'ca4a1dc3df9b770dacbbecab5495cae817a5dde0ac2d3ff1db1f8e447f0e48b7')], - 'download_dep_fail': True, - 'start_dir': 'pyjulia-%(version)s', - }), -] - -local_jupyter_path = 'share/jupyter' - -exts_defaultclass = 'JuliaPackage' -exts_list = [ - ('ZMQ', '1.2.1', { - 'source_tmpl': 'v1.2.1.tar.gz', - 'source_urls': ['https://github.com/JuliaInterop/ZMQ.jl/archive/'], - }), - ('IJulia', '1.23.2', { - # installs ijulia in JULIA_DEPOT_PATH and kernel in $JUPYTER_DATA_DIR/kernels - 'source_tmpl': 'v1.23.2.tar.gz', - 'source_urls': ['https://github.com/JuliaLang/IJulia.jl/archive/'], - 'preinstallopts': 'export JUPYTER_DATA_DIR=%%(installdir)s/%s' % local_jupyter_path - }), - ('Interact', '0.10.3', { - 'source_tmpl': 'v0.10.3.tar.gz', - 'source_urls': ['https://github.com/JuliaGizmos/Interact.jl/archive/'], - }), - ('LanguageServer', '3.2.0', { - 'source_tmpl': 'v3.2.0.tar.gz', - 'source_urls': ['https://github.com/julia-vscode/LanguageServer.jl/archive/'], - }), -] - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': [local_jupyter_path], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! 
- 'python3 -m venv %(installdir)s --system-site-packages', - 'mkdir -p %%(installdir)s/%s' % local_jupyter_path, - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - '' - 'export JULIA_DEPOT_PATH=${EBJULIA_STD_DEPOT_PATH}:${EBJULIA_ADMIN_DEPOT_PATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # configure Python<->Julia bridge (of python package julia) - 'source %(builddir)s/env.sh && python -c "import julia; julia.install()"', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm %(installdir)s/bin/python', - 'rm %(installdir)s/bin/python3', - 'rm %(installdir)s/bin/activate', - 'rm %(installdir)s/bin/activate*', - 'rm %(installdir)s/bin/easy_install*', - 'rm %(installdir)s/bin/pip*', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/julia-%(version_major_minor)s/kernel.json', - ], - 'dirs': [ - 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/julia-%(version_major_minor)s/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Octave/JupyterKernel-Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-Octave/JupyterKernel-Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index c712e30c7783628b321266ffa1c3e9b35d38f66e..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Octave/JupyterKernel-Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,137 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Octave' -version = '6.1.0' -local_octavever = '6.1.0' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/Calysto/octave_kernel' -description = """ -Native Octave kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('Octave', local_octavever, '-nompi'), # ensure it is available -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - # no dependency to Octave as it is loaded in kernel.sh -] - -local_jupyter_path = 'share/jupyter' -local_kernel_dir = 'octave' -local_kernel_name = 'Octave-%s' % local_octavever - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! 
- 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # enable use of Octave from production stage Stages/2020 - 'source %(builddir)s/env.sh && pip3 install ipykernel ', - 'rm -rf %(installdir)s/share/jupyter/kernels/', # remove any kernel installed by ipykernel - 'source %(builddir)s/env.sh && pip3 install ipyparallel ', - - # install Python package octave_kernel - 'source %(builddir)s/env.sh && pip3 install octave_kernel==0.32.0 ', - - # write kernel.sh - ( - '{ cat >> %%(builddir)s/env.sh; } << \'EOF\'\n' - 'export KERNEL_DIR=%s\n' - 'export KERNEL_NAME=%s\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_name), - ( - '{ source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << \'EOF\'\n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module load Stages/2020 \n' - 'module load GCC/9.3.0 \n' - '\n' \ - 'module load Python/%%(pyver)s \n' - 'module load Jupyter/%s%s \n' - 'module load Octave/%s-nompi \n' - '\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH} \n' - '\n' - 'exec python $@\n' - 'EOF' - ) % (local_kernel_dir, local_jupyterver, local_pysuffix, local_octavever), - 'source %(builddir)s/env.sh && chmod +x %(installdir)s/share/jupyter/kernels/${KERNEL_DIR}/kernel.sh', - - # write kernel.json - ( - '{ source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "-m", \n' - ' "octave_kernel", \n' - ' "-f", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "mimetype": "text/x-octave", \n' - ' "language": "python", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_name, local_kernel_name), - - # ensure correct permissions - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm -r %(installdir)s/bin', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/octave/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Octave/JupyterKernel-Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-Octave/JupyterKernel-Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 2e12dd84d3bca1b9037c4c5227d874f06fd86c4a..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Octave/JupyterKernel-Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,138 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Octave' -version = '6.1.0' -local_octavever = '6.1.0' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 
'https://github.com/Calysto/octave_kernel' -description = """ -Native Octave kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('Octave', local_octavever, '-nompi'), # ensure it is available -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - # no dependency to Octave as it is loaded in kernel.sh -] - -local_jupyter_path = 'share/jupyter' -local_kernel_dir = 'octave' -local_kernel_name = 'Octave-%s' % local_octavever - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages'], - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Create virtual environment to ensure we install in the correct directory !!! - 'python3 -m venv %(installdir)s --system-site-packages', - ( - '{ cat > %%(builddir)s/env.sh; } << \'EOF\'\n' - '#!/bin/bash\n' - 'source %%(installdir)s/bin/activate\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:${PYTHONPATH}\n' - 'export JUPYTER_DATA_DIR=%%(installdir)s/%s\n' - 'EOF' - ) % (local_jupyter_path), - - # enable use of Octave from production stage Stages/2020 - 'source %(builddir)s/env.sh && pip3 install ipykernel ', - 'rm -rf %(installdir)s/share/jupyter/kernels/', # remove any kernel installed by ipykernel - 'source %(builddir)s/env.sh && pip3 install ipyparallel ', - - # install Python package octave_kernel - 'source %(builddir)s/env.sh && pip3 install octave_kernel==0.32.0 ', - - # write kernel.sh - ( - '{ cat >> %%(builddir)s/env.sh; } << \'EOF\'\n' - 'export KERNEL_DIR=%s\n' - 'export KERNEL_NAME=%s\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_name), - ( - '{ source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << EOF\n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use \$OTHERSTAGES \n' - 'module load Stages/${STAGE} \n' - 'module load GCCcore/.9.3.0 \n' - '\n' \ - 'module load Python/%%(pyver)s \n' - 'module load Jupyter/%s%s \n' - 'module load Octave/%s-nompi \n' - '\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:\${PYTHONPATH} \n' - '\n' - 'exec python \$@\n' - 'EOF' - ) % (local_kernel_dir, local_jupyterver, local_pysuffix, local_octavever), - 'source %(builddir)s/env.sh && chmod +x %(installdir)s/share/jupyter/kernels/${KERNEL_DIR}/kernel.sh', - - # write kernel.json - ( - '{ source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "-m", \n' - ' "octave_kernel", \n' - ' "-f", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "mimetype": "text/x-octave", \n' - ' "language": "python", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_name, local_kernel_name), - - # ensure correct permissions - 'source %(builddir)s/env.sh && chmod -R o+x 
%(installdir)s/share', - - # Ensure we remove the virtuel environment to avoid wrong search path for python packages - 'rm %(installdir)s/pyvenv.cfg', - 'rm -r %(installdir)s/bin', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'lib/python%(pyshortver)s/site-packages', - 'share/jupyter/kernels/octave/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-PyDeepLearning/JupyterKernel-PyDeepLearning-1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-PyDeepLearning/JupyterKernel-PyDeepLearning-1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index c80337c195b12e7c6ff9096758e511af6b17dec2..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyDeepLearning/JupyterKernel-PyDeepLearning-1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,153 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'JupyterKernel-PyDeepLearning' -version = '1.0' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://www.fz-juelich.de' -description = """ -Special DeepLearning kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - # just ensure they exist - ('FFmpeg', '4.3.1'), - ('OpenCV', '4.5.0', local_pysuffix), - ('TensorFlow', '2.3.1', local_pysuffix), - ('PyTorch', '1.7.0', local_pysuffix), - ('OpenAI-Gym', '0.18.0', local_pysuffix), - # ('Horovod', '0.20.3', local_pysuffix, ('gomkl', '2020')), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -components = [ - ('logos', '1.0', { - 'easyblock': 'Binary', - 'sources': [ - {'filename': 'logo-32x32.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-32x32.png"}, - {'filename': 'logo-64x64.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-64x64.png"}, - {'filename': 'logo-128x128.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-128x128.png"}, - ], - }), -] - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': True, - 'download_dep_fail': True, - 'use_pip_for_deps': False, -} - -exts_list = [ - ('lmdb', '1.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '165cd1669b29b16c2d5cc8902b90fede15a7ee475c54d466f1444877a3f511ac')]), - ])), - ('gviz_api', '1.9.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '43d13ccc21834d0501b33a291ef3265e933dbb4bbdca3d34b1ed0a048c0ef640')]), - ])), - ('tensorboard_plugin_profile', '2.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'dfbf254ee960440e3b2518324f876a6d6704c60b936887d99214fa36988a206a')]), - ])), - ('tensorflow_hub', '0.11.0', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'tensorflow_hub-0.11.0-py2.py3-none-any.whl'), - ('checksums', [('sha256', 
'19399a8abef10682b4f739a5aa78b43da3937df17f5d2afb0547945798787674')]), - ('unpack_sources', False), - ('modulename', False), # skip sanity check as it will fail without TensorFlow - ])), -] - -local_kernel_dir = 'pydeeplearning' -local_kernel_name = 'PyDeepLearning-%s' % version - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs - 'HOROVOD_MPI_THREADS_DISABLE': ['1'], # no mpi by default -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # create kernel skeleton - ( - 'python -m ipykernel install --name=%s --prefix=%%(installdir)s && ' - 'mv %%(installdir)s/logo-32x32.png %%(installdir)s/share/jupyter/kernels/%s/logo-32x32.png && ' - 'mv %%(installdir)s/logo-64x64.png %%(installdir)s/share/jupyter/kernels/%s/logo-64x64.png && ' - 'mv %%(installdir)s/logo-128x128.png %%(installdir)s/share/jupyter/kernels/%s/logo-128x128.png' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_dir, local_kernel_dir), - - # write kernel.sh - ( - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << EOF\n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use \$OTHERSTAGES \n' - 'module load Stages/${STAGE} \n' - 'module load GCC/9.3.0 \n' - 'module load OpenMPI \n' - 'module load Horovod/0.20.3-Python-3.8.5 \n' - 'module load %s/.%s%s \n' - '\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:\$PYTHONPATH \n' - 'exec python -m ipykernel \$@\n' - '\n' - 'EOF' - ) % (local_kernel_dir, name, version, versionsuffix), - 'chmod +x %%(installdir)s/share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - - # write kernel.json - ( - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "-m", \n' - ' "ipykernel_launcher", \n' - ' "-f", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "language": "python", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_name, local_kernel_name), -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'share/jupyter/kernels/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-128x128.png.base64 b/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-128x128.png.base64 deleted file mode 100644 index a45503592a47dc32687b032e19bd6f54555db86e..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-128x128.png.base64 +++ /dev/null @@ -1,164 +0,0 @@ -iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAAABmJLR0QA/wD/AP+gvaeTAAAACXBI -WXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5QIGDS0A/QTlkQAAAB1pVFh0Q29tbWVudAAAAAAAQ3Jl -YXRlZCB3aXRoIEdJTVBkLmUHAAAgAElEQVR42u2d+XNb15XnP+dh4aZ9swBosyxvsmzHu53I8e7E -jmNn7XGnDIBO0tXprq781FM1P03X/ANTNVMz6YwnbYkP7lTSsTNxnHhTvMuWLe9rtDqWROBR+y5u -wDvzw70PeABBEgQpiRF5q1CiQPDhvXv271kuTK8pvWTKPrlbEKAFiNh3SsAAmaQ/lbYhOoWZvwO4 -BZhv/38AeB04Os0AU0P5xYALEFL2jT3AO1NtF6ayBgjUfjH0s061DXCm3aCpvaYZYJoBptc0A0yv 
-KRsOR6cJj1P+V6ceI0xlBjgC/Lws/YraSGB6Ta/pNb2mnZ9zfLkFB2hDJJwL6CWdmM4FTJE1E+R7 -QML+Pw88hckJTDPAX8Va77XgaDtIC2gURBAtoTKA+r1kUydHUH5GA5ikEPbnKRcW//UxwL/tEmKx -JCLnoSQRWYAyC6QVwQEZAE4gziFyXg+wFyVPJnGqztUU8EM/6zQDTOaV85YCq4FVwBJDcEpIDfGE -+cBKK9GHEL7A9bZR9D/hh6mB+hdXpnGAyboe625BI9cBVwOLLbEHQ46sU6O+fSAg9AzgSwiriDkX -43pvkkn8ZSj9p6Y/PPkZwPVmotwNrAHiVNK3USAG9AP7gOOgRZA4MAtYiKn2GbCvFuByhEXkvLdQ -Pjoj9//ITofWtkUoLSADxPp6+MH5Os0AjawubwbwAHCRlfSSJWoM5c/ARwgFS+CiQfPUAaIIrags -Q7jOevqDlnkWALcBqZD9Pw3mKj8H5GaQ5UDMmiufYusgOW8X8CrpxFmvPpq8ei/ntQL3AV8KOWgt -qO4BngXxKPm9PJzyR2CgCA5xVK9A5B7LPBoyE2rfi6B0A0+QSewb/70XrgL5GtBqhUxr9nwQ6EP5 -I5nEJ9MMUI9wwvUID4RseQuwGdhAOnFsbNcrCKILECcDzKnj7U8cA+S8a4BvhSKL2r3WKkZQ/S2Z -5FljgskZ9zrMR7g35OjFgbdQnhkz8QGySSWT2g/6M5Qjp43xuwqLgW/XhJQOqqes5jpeE4JGEbmf -rvzMaR8gLP3KnQgRa7OjwDaUV4aJ5Rtf6WQvj+XXoc5Pa8zBBDGufKNGwg+h+gyZ5JaQU3sJwrct -8GSY25E7gN9NXQ3QlXfIeS24XgfoPOAyS3wBjqO6mUzi4IR8V1EOo/qsjSAmzhi6hbnABZYBBDiG -8ocq4gNkEltQ/kgl9RwBuYh13dGpxwBdXpScNxuRq4EHgX/Gkf+ClDfHAT7n1KnPJuw7O5MKfISp -B3CqWGB8THBRKEQF2E8msb3uJzOJj4CTIVPgEIksn1omoMubiXA5sBaR+RhErxTaGLE/f85PVk1s -uCZSBD4Gbi47meM3BjNCP5dQRtZYqj2IzAox+sypwQA/2yHM7FiBchvChTYc66+jiI0a9fliTNdf -n2/DkRKZ5MCwn+nXQeKyC+G2UJQx3tVXo03aR/l8a41DOHjuM8Avdwml2BXAnQgL7eaLtccl4ASU -N05Q+sgm9o9uf70YJjewFAMV/9lKeP3146SS8w4xsSVgX1jHsmQl+jzWF2bSmTxex+fpQGRR6DkV -2Htu+wCP7BeK8atA7rVoXH8ZJFE+Bp4E3grdkwKNef2i8xD+E3APcAOwmvWF1pFVMEUblk1MSJhJ -5lH22uspMJeI3E2X1z7E9DnOfSENoMBBMokD57YGaBu8BOTr1lYO2g3Yg+pGfN1JZ+o4buFiRKSM -1EmVWh1pBdcr2teFRGQlMJLz6KP0I2VCTAAj6KsgfxMKX9fgMBu30I3QCzITSALLapjxtXMbB8gV -FoDcb4lftMDOZygbyCQLIcupNXF0pKHr+xxD2IRwl9UabaC3ksvvI50aRrLUN4wjEwcKiVxMdX1B -FLgAkeX2uWMhMwEmp7GRbGLLuc0AKt9GmBXahK2o/oFM8mCNZ3wcJILgW1PQ0SDSV8T13rf4gYko -kAQq38P1flMfQxCtCdvGiQN4twKXhhhAqCSwxDK9hnyEOMrLKBtGuOY8RFehzC+bCtEdpFOH/3oY -wPVuQFhBJZNXAP3TEOIDcWdw34C2hDHzVnJeO+kGEMBMYh+u9xzwNzbzpghJlL/DLbxCJrlpiBdQ -nQ1sPhDMeVfYkDLsU/ViZg2ssCZKrY6LgRaA50C2kU0U64TIHTjcZRhKYiGEwgcp4nqfA8+SGX82 -8fQywKO7o1YtB1LQC/oe6eSeeh8feGhFCbeQtx6yAh2oLgG2NeZn9H5Gb9sfgW9YYqgJx+Tr5Lxb -UT4DvkD0IKqtION//py3HLi3BlnsR/U/GBzcSSyWtEzQYXAN+QJTplYim9A6ArPIJsGWMzRdbVLh -wmXAEnLeU6QT2yYvA8SiN1nnzLcM0ENR3hrlrz4B7rLqeQ4iFzbMAN9fqcBmct4A8HW76QE024Zw -LXCtddTF+hyBZhq7BlhfmA98J/Q92Pt+kUxyh/1/3r4a0ZbzEL6BKWcbrKOdwv7KXOA+XO93ZBKf -T9IwUG4IcXEv6Mc8nBjZ7grvhpwkB1iOW0iO6WvTiQ+AdcBfqC4d0/JLxB+X2n/UixCRB6zPoaHI -4gPSiU1NmMo4wvWYmsdBe78+ymGUD0A3ofpFaG98YCHCWtx8x+TTAG4hBcwOSeAJSvrB6B69nLAe -/c12I5Ygcg3ruvfy8JLSGJhgL/AobmE1Ijda7CEW8sTDDBH8r3GGiHGHDedCaV+2kkn8ftS/7cpH -EEdC8q2IngdcD9JXcR71JTLJV2r29UJEHqRSHrcSkUuAdyebBlgTkv4SSp7OVP/oHn3CR9how7nA -i76KaOR6/senYw/ZMsnPSCcexffXAX8CfR/YCXghSRsbEuB61xhTUq72iVg1/+tR//YXPRHEuQq4 -qfwSvmy0pQSmJAa8RdF5vc7zbEd1g/2MAu2opHisEJtsPsDy0JYW7aY3GjfOQGW/hXZLmGqg25g7 -T1nnvcvDibHj5tnUfmB/SIW3EqXTfkc1C/zPHebfeKwSvBWBSC+0zFppPf6ZloFiQDeq/04mOXpY -GderrZMXG4JmmHyCY1HSnTy8uDgM3rCFCoxeRJiHLzOBQ5OHAUyGr/JwoqNj3f93j0Nr9ArgRoTl -IftdAjoQ7iHCAnLeR6QTu8d1f5GqTiC1INUN5LyT5f6AsEaIA8QFk/NfbAkQBQ7g62/JJkevVMp5 -CcxoOqWSAKOGAQWl11Q4Dwtj+8ARhHn2WnGkOVqeHgZ49IsISiRUBacox0f2qLvjRCJ3YLD8eEg9 -h5s3HYS1wPnkvJ3AblT3O6JH/HQD5mUkLNH4K2sZvS6gaIkfAU7i69Nkk14DxG8B7sCUrGuZrdTS -wKTC+2zo2kGlYqgehuWABNcxGkObyyaeHgaIxFuHJHZ1hLTr/9kdJRK5n0oFcIClDwA7UByrEeL2 -vcWYsu7DiBzxkVPkvBPgf0w6taNJ78cfRioZRlJ9lJchsrXBv7nRag8J2fk3MPMJHZTFCLdYJoiD -XEpXz3ayiwfqIKuXIrRRSagdoOgfmzwMII6ifjXMrhIbdoPbY98Argg5jVGUbQivoRxEFZDlCHda -b37QaoQZVqKs3XT2AjuavGsHpaUs/wFupxo2a0EX0iDwgoi+rZnz/Aak/wLrNMbsMzqo7kflebKJ -PuvdL0LlCiv9g8AaxO/DLbxAJtkLwCNehFauD9UxmJoJ2MMPU6XJwwDodaG+++C9dky+v9ajXm1j 
-34p1Vn0BlU1VRaCP7jtKtFhA5GaU6yyZSqHXICqNVw5VQyuOdRDfAD1qEULHNpmEdIWWUAYQOQns -13Ry9GKSLm+2NWvzyjG8MY//j0yiL6R/DhJhk0UV+4CILZVbRc47bO90tgWd2i0jxVA+waHpZNLE -MkBXfgaO801M82akSrocmYtp4apVptfZBzIkUX2PUulFHl5aHZP/cJEC+8l5f0B43TZ7XGhNgU3p -arNhraD0A1+QTu5nfbfgCPg+SMTohJIPqNK5VMeoV1YA4SxhG2YOwa6qz3UmS7iFtxFZaLVFoN4X -WOahDG+ba0WBbuBFHkoMnH0G6MovxHG+AyypQdgcwwxaqvs3plEj/NlnhxC/GuAZAPbiFl4EXrbq -tM2EZc6x5s1WmBhLtI6uaAYMm2/AHaJW+qO2pe0d0nWmkmeSJ1lfeIaIOMCV9m/8Gj8l2KfdKL8Z -b7X0xDCAW5iPyP029i/WqNrNqL5CNjk0c+U4C22MH5AgTyZ5skGApxSCRfsxVb7jsFoTbAVzXsw6 -tavs/Yl1hJ8dsb+hM3mCX+15nIHIHkRutRojbLBOoLqRI8VN/HTZuItlx88ArtdmnZILqRRYCspf -EP0NmeTw4Z/6BVSexLHmwucY585aAdwe2pMY8CI+o9c4Pri0ZCOEN3C9ZajOR8RH2Ec64U3kTY6P -AR7dLQhXAdeEvFIF3UQm+fToUpw6Mm7JnbDIBQcljtsdB3FQfLKhYRJdBQGNCz6IiCIllCLZlFot -GKnsp8xAuScUT0RRfR90M52pscXrmcRuYPfpeuzxMUA0tsSCJ6WyFVXdyIC/4a9MWg0QJHo76vRb -wOkE8IcQg8wB+ZriBNj/dpQPQxK+0gqCgnYgkrKqPwLsQ+Q10g2atzO4mmYAcb0WFa7BDGLotdf6 -AJFX+fGSyTEA4edbhdY2h85lo8XIJqmCXGpxAAEOVzOAtBsCiylrU+0FP9zVuwCRK6lkGPuptIK/ -STpRmIyc3zQDqLDYcnyv5fL9KBurYtuzsUxFzTyUGUA7olFcr4hw0sDReoBMsl5PgIY0mYzw+wCS -1ho0sXbUbDCD+FP8MzSN5IwxgPFwr7GOzQCgqL5Lb299B2V93lTkdKZOnUbCr0C4yDpfixFmGAKI -ls2TcASkm5y3BdUeK821UHBQvXSyjpk4QZABVPpq6gcGqW7/FgsuvUY2Mf7n/kV3hKh0gDhEpI/0 -xAhacwygzEC40j60LfRkB39/QX3V7zitwPfJeUeBHnx/K9kJqmzN5VtRZ62tk1scksS+sjRLOX5u -taDMSkTyFoMIKpAPofq6dUqlTjh7BHjclpM7plJHBkLk3mmqmqvWMTLJnnE+32JwLgEW2ZBQgAFy -3kF8/ZRssnt8vu/YY34BbkTkWyG06nXSiT+OIJ0zgX9BGAROoewHPkTZTDZRGofULwzV0EVCKjxm -iXoStN9mzqJUcggBpl8ZE6PsBn5DJnFo0uhn17sRKU9Gi9WgFb5FVt+hOPgWDy87Y7kAwSRugnTt -cVS7RwVag0FOJne91HL0Yrp6nia7uL+JzVmO8C17ncA2xwxQwpsI26zK9i2Bkwg3WLCqb4iNlzE0 -opyJlfNusRFWm32GeuHjIuAuorE4672NdCaKZ4IB4oissDfkWG/ZG8VkiHHIyiPbDLGEa8AfoKvw -PNlk4/FxrnAx8E1McqTiuKm+C7wEctxCxpW1Lt9D1NmKcgPC3VCn7UwnDfGvtsQPV1THy+pfabVt -c75l+ltxOARjdzabYABZEfpPBDhOJjnaYKUTwH9FWWqlcJXVBorIWtCddO3ZSraBREuucBnIfZhU -cPD5EvAHMsnhz/0z08SO0+W9AvQh3D+ECSbDyCy3sAgzELOdoGRdOQS6ARGb6tYFZoyOXGQFKoaw -lpyXJz223MDYs2fCMipJiSKNnLSZSSiZxDEyiU+BLqhqh/JBbkeioxc1ut4VIN+sIb4CvyadaOzQ -x2xiEHgHZTPhujwTvZ/9UfEiS4DzqbSQ7Uf0EdCPSSdOkU6cIpPcDfJblC0EiSbV81FdirtPTrMG -YCGVUu8BVMfWnpROlOjKb8JxEpjK4SLCSlTnAT0jEP96WxDSTnWG7PekE38eI7zah1t4FZXViJ1N -ICRQ/omcV8/ncVAtkEn+vOZ+7sAksxwzxyhpgCN3rwP+1aDfRaRvVJEa+v9SKGp5lpKcsKNtws9w -BNfbjHKRMYBSRFkFpa0WmzlNDKA6F5FQF4yMPcbNpvrIed02JHOAAYQrcb0joEUjiRLUvnUAN9um -CQnZRB/0FdLJd5sUtQEbpy8p+xEyrBPoDHUQVezI+Yj5vVZ+75RMc7vZ32YdS0E5BeynMzGcaTyG -Od9gnt2X+WOlabQJFRXuQikh2lwxpupxKx3tlpFusyDOVkQOmo2TFMIa+4D9VBdBvk4m+eIEB8BS -5zdW28kw7wchpRM2ahL6Xe01dZS7CTer6CieiU84D1NBH08jAyjx0I35dtJGU8au5o0B41/I+SHf -JDjbt59KNcxh1H+RTOr98Rnbqr2VEJJXrKuWhVrgqtdqkBarAY6FgC/F93utSesfg4tpcxLMtMTt -QFnE8KeYzEKYS7lCSI8yxrE30XFKjtBsd5HoTJDWkEQEDZqDNdePVAjPF8BrZFITME/HsRuuVJBA -/3lUD1Vhh44j+ApOTZ2+r9sR3ysXkZa0YncfWuzTld8Ourf8fCpiFYnUiT1N8amvg0ScKxG50/6y -H+GruN6BISNsXW+2RWPbygIi8hd8+k4vA5gvi5eJJuWfG19dXiumry5mJc7BtFbFMEWPwTVPGcLg -AduGnbvXnAmqIBTmpyI4B8k2GEZ1pvrqYglhPwfGjte7hU+sbxRgLecj3Euu8KHdCxCZgxlGcbml -R8xoG91BNlk63SbgBFKeaRdBpW1Mf7/ea7UVRCtC6F3Q7xY1ZdESN6TRk/gcoDN5OgYo1Tsi5uxP -Ts0kD5Dz3rbRViDdF9ux84ctwwbVwUGDSi+qr+L7Y64PbMYEHMYMOvKtpI4+4ND1osDFiM63Me4y -KrWAvSgvkKk/NGJc67E9cUrR81C8ITBppaxcmGxnBZX4EId2G/YGXVJRC/0GzBu0pvWj+jKqH9K5 -xD/9DKD0WM8ci0DNboBpWoAHbL19C5UkTBTleRydWOK7+QWIcwNKCkfnAT+zIVPYBAQzAsIaYXIc -HduZKNHlvYlwxA7OTth7C7RWxL56UF5A/c/ILmnKGW/GB9hDJfNmmipz+XbSI+b6BdPBMxhCuCKo -/gmf98gkJwaBW79LcOLXUenejVocITKMH6ChYKQFdAluweTcfS3SmcyHbHMcc/JI0DN0AvUPl6XO -LczETO3QcgCn2kd08AAPrRj782UTRbr2fIpEd6O6CuRCRBfakPQgqtsR2YYvx5uR/HEwgOyh0i+l 
-wCzUWQQNjXR17HceQnkO9BM6UxMzqasrH0XkFttfF6h1f4ShD2GJL5lOW/lmmYAROWw1RxC1zgOy -BJPOhLfxnRcrjp5calV2yMTIAH7LSzQ5vMHmRo7h5j8A+TTko/ioFm1p/LhWEwygRVPyXZ6OMRch -OSIDqPrALpC9mHk/O8kkeidMZZoTRr5sCTAYCvKDBlN/mHi2toEl3KPQVod5WyuOq0ahKDW/b6u5 -ZgfC1biFPQ0kzEZwDFNjaVw97QzgA1ssYBNM6EyRy8dJD3MmXyZ5CvjX02Ivc55Yx/KeEIYQAfbg -8wJ+aScP11GRzWf+dISgQYe4c6b66GrWec/z8OQ7l3jsYU8mqYiEq2GLwFLUWXpWnkBpQfkOlRIu -ATbjs45sYntd4lvopcY8FK2jeNiCTkdrPl+y7x3GTOI4Va1ZtA9TTnaU6hnHReBaIlzEJFzNyYHr -xYHv22hg0KrOl/B1w1iBiPF7/IVbEfm6VfUO8AklfkvnKA2TucIM4H6QNZYddqP8iuwEHOXmeq0I -P7LhcsmGcltRnpxUJWc0WxTqM0iEtzCjWbFMcBWO7AS2n0Hix0C+QqU66QDqb6QzNdCA5ggnW9RC -uvEJua9Mog/XewohY03kALAa0Txu/kUyDTi+6zwhyiygHaUdUR+kD9VTOM4xHlo8IdhFc8iXSU92 -Y2byx60qnAPchFuYdQYZeJUt/1YrxR6ZVHfTum8iK4J88sCroav2gdyENGAqc147Ua4Gvgv8E8I/ -gvwU+Efgu6heTc6bdfY0AEA60YdbeBORVZYJ+oHLEXl7COjiFqLAJXZAZzDVIk8m0VwI6ObngNOO -6aMPrjGAaOONk/USrROJB3YmSuS8DzHlbxeGHOY7cQu/HLYL2gyUuANTFhacnFY5Rsfs9yrgY3Le -S6QTPWeHAcz9HETZhXAJ9dOoFZBFeMCe6+tgJng+BQ10ylbi/Lk4zgoLxiywwNKCEClL6BiLU1TC -A6Em/tiWdOIorve+DZkj9vqrQL4MdaaE57wOzIjbYFzOYAj1w74X7PMaYAZd3pNkmz/scrzt4eHE -qe2mqVciJr71npfYzy/BZ1FDDNCVn43INYistMTvCEHJ4XGvam36WPxfJwSvzgC9Frdwwv6yj0zy -/ZAW6wCuDOUQtpBOHm1A02wxoSBXEWD4wlpc73MyiZ1VQJbRaOFZSTFMZ3CBYDSskf6gDH45wlq6 -vKfLs4bOiA9QeTozWKmyo0dB6knRIOb0jmBIUiuOXksuP2MUb/pLOM7fIvJVTPNHMCUsdOZeefxr -3Ixib/TWNVyp4wOzELkJkTvta201v8hMRO4GuQPkHuBa1neP/n3ZRC+qL1ufKZgsGkX4Jv++J1RG -JrOBL4e0kQIbUJ4AfR7f34A5XPJxW4QTtLutxmHV2TIBDpWOFSM19SprM4kiOW8L8BWLlhVBVqFy -H+u939NZ0zv3s+3CjBnfQPgSlcaIoiV44G/sRfWQma2r/XbGrjcm3YVGaqqC4nV+rkUCbcQhXyYS -+dyas1GYIHkQt/CGzeN3WAZeQCn6XVwvj2k1W2QP1TC+grIR9A0yybBk9+HmjyBOO+ZU9V6juVjB -o95Wfjj2CarOuE1A+BpaN8ce/G4f8ALhUmzhUiL8HW7hS1WfnTnjB3Z4VAtBylaJoXyO8hjw34FH -gScwLdzPobxipazRW/dBWqgev+KEbG6kzrOGh1a2odyB681oEED7CGVbSL37CKsRbke4NYSpCHAK -YUcN8cOw8J+tYyhl02BCxjOsAaQ2harxYZkqk/BxC+/ZwxJvCzkz8xF5ANe7G3SrPVjp4pAajAK7 -EH0OpBtfS2SSzfnrXd2zkMgSYB6oraeToMr4BMpOK1X2cIsqk3EE4S3gcqv5igirUL2GdfmNPNzA -nD7VDfb8oGDEq1NnvwRT9DoSllHENNsEJ6G3gp6FYdGqJePUiOFEkbYRr5lJDpDzXjAxMbcRnrYt -zAC5piZIGwQ2MsAL/KhJhHF9dxuOcwMiV6PMtU6ohFR/kF8/ivAK6WHOKcwkT/Jvu54gHt8L5ZPN -BxC5m6h8jkmTj2YKjuMWNlrkMjas26jaMgptxGrHYJ+KlpHPtBNI0Q5NDCqE56KjoGnpRIl04lVM -h5A3QvTdh/IS6cTz/KiJDuLHvCiudzORyH9G5C5gzihl1qPDQD9ariifAltDBPJR/pacN7NBU/A2 -6JtmbH359QFmbqARCJF2RJbQ5Q1lgq6CWIh5bkiLHEb9E2cuF1AJXWI4zq3AndYxawN+je+/RzbV -GEfmvLXAV6k+SrWEGavyXFP3lSvMt7n9iwl6ECvPWiJ8yEPFtncDT5BuIKbOeasxzalB+XYU1Q/R -4n+QXdakefIWI/yDPfAKVAeB3yFsRbVoI9YIyAILMXeEdMZTZBKbz7wGyKYGa2L5IspKRBrH1NOJ -jcAWwpk1pbtp4rteEuT7wCUhp0ot+tiDOZPoTeAd60g5VYFXY/f8mYXBNaQJr0KiFzTvT/lHEX2H -oAxeJGZPBvk+ItdbZPAe4O+p9EY6mNxL0wdHTcSgyMOWCeZYabvIlmE1Bkys3xXD5PNDfQH+M00S -fyHCvfZ6wYSu46DbUN6uKjzNee2YtrDZTX2Xz2YclmFG1VqAR75Fl/e/yTZR7JJJ9ZLz3rT3tKQS -+spqA7GXAbUilakm+0yHVKLpUXvjL4Mu+fvRsP1iNsKl5AqNmRcntsaq/0CadpFJFZpQoW3ArYYB -y5NLDqE8TTr52yFVx0q8ph5gbOYwmzgAbKIyigbrZ9w7Duh4P6pPoXxOpaKpZJ+nv4JBlE3Ws1Vo -4llhgM7UKYTPQ+FTH7AWlcYkyxx41FK+H+WtsRM/7yB6iR2nEhy7chx4jkzio2G97dF770Yj2IfW -pIRj+ytwvSubvmYm2Y3yO0wmsdsKRhumZczMHIRN+DxJOrF1vOSbmFnBvr8Vx7nM2l173p/eYYGa -EVR2dzsmoVOZ16OlsXO0I60gt1A9TPlthD+PgmH443aIff9PiCyzU74DjPE2urzuhruMhmqXg8Dz -uIVPEDkPpcPWK/SieoCIs5v04gkpL5sYBsimTuB679ni0BkGr5crcT2PTOKN4XGEyHm2tSyQxENN -1rfPxzSbBNmz3ah+TPoMVCdlUydxC88BD4XenYfDnTRyitjI2qCASQSdtjVx4+IziU/sMarXlcMr -4XZy3iDpxNvDSGFbNeSqzQ2LVvYj/CuqF2Jy7x+TSe4d5W+kHHJVAKFmCbUNt/CO9dYDZPECXO8u -0JNAEpE4islXFPVDfpg8xSRYE3xiiGxAdbE9LFqtbb+LnDeTdOLFOgwgNUQpNUmAPmA7bmE3sJGG -SqjVDIeuDLsYHyai+owt1phbhmeFG+2gSjO2zqCQlxKTtbiFt8kkXz7bDDCxzZDpxQOoPoGpmpUK 
-Ts1act7DdHmLagherJI8kZZxqsx+MsljZJKN1NBHqK7jH189UDY1iOrjNXtq0tSVvQgg3FmI3ELO -y55bDACQTR7BnNt7uEbTnI/DT3C9LG5hjbN+dwyfk1SSSWodwjOzRKI2PTtxhWCZ5G6UHuoneMLT -O4L8w0pc7wfnFgOY8OgIJf4XOiQ9G0W4AJEHfSf234jwI6rP0WunqzDnDD37HIZ28oxvdXmXW0c4 -7KH34PMMvv4S5U925KyGjOAqXO/yc8QHCOMDiQHgEVzv63Y2YJD6VBsv+wzNuZcQrrAx8OlbbiGO -6oVI1azf8bOCw+1UilR94H3SiSdrPvUKOS+NKe0SKxRfwUDL54gGqI4OnkX1FxbdOkWl8DJQiTXO -VzklfDr1f5uFWBLAM1gAAAPJSURBVEtDFHWzK1dYCJwXsvUHKJWer/vZUulxqjuZZpMrnHduMoCx -jXkyiS58fx2qr6FsRcljhh8dryHCXHKFG04v/bkCYQHVCSgdlwZQOT9EVAUO0rmkfk7AvN9T5Q8o -iXPLBNT3lHuCB2/51afS3z9nIY6zAFMMuSxEoVvIeTvGOva0MfXvpTDFKH0TzFTtYXzQnkM40uqr -EcSWc1cD1Fn9D16mZFP7SCc+w+cpqg9oMH17E9T9UlHT3nx7dHsLEz0WRjkRMiKRBianzKtimDFM -9zwnGKBaMyT2Yg6BjITs4vnAfXR58ybkO9Z7SzCVtEtDtn/imsFUd9RcbxGut2oYLbQmxADBaNhd -Z2Prhcm0ct59mOqgvrJfbg5yeJVMoumiB9zCtSA32hAtKEaNWKmLWRtsKoIyzXfZ4HoPIawu+wJq -M3eq2ymVThGJzkK4BOEmwnUIykdkEo+f+z7AaMvnBRziwE0E6WVzbPy3cAvbUX2bbKp7DIRfjchV -mJF0bSHil1BeBS7GHH41QeLkP406y6w/4COcB3wNketxoiW73/OhKgF2DOX5s7Xlk0sDmHCqA+V2 -RG6hMmjBodIkeQjVz0H2IP4h0GPse7PEnJvixJgNstAeaLGMyjy9wNt2LPEfxwymfBAhZX87fg1g -tMAKW7NX6WmovDT0MkOy0fWkk7vP1nZHJx0DpJMncQsbzCEJ3BvK2zsY1HCmPZSxCI6pIVj0FQkx -dDCIKhoiPFbqulF9gkzSw/XmnBYBcHQXKuuB71lcoHZGUdB1dBDVx8gkvbO53ZOPAQxu0EfX3k2I -vwe4j0quX0P3Pdq9h2cQD6L6FCKbyZSPppE6unD8kcFDSQV24xYeQeRG4HpU5yASIA09oO/ae+k/ -21stTPbl5qPgXIZwK5V28MYiHOUE6LuobiSb6q1R1XOBH1jHEHvw1RPjmuY13HpsTyu+E+NU6RQ/ -WVaaTNsbnfQMYMapfAh8iFtYCqyxE8oWhNhY7QEOGKdKv0DlUwYHd/LjBo9Yl9M4Mfahpc0Njp5m -gCGmYQ/hFiw3PxucDhtB9PLFkSP8y6VjoWJNLkKUKbaif9V3n0kdpZFDq4YLOs3fthKcfwj+NANM -maUngGdCQyUGETnK9Jpe02t6TZklU/bJH/OiKAtRDRpZ+4GDIZxg2gc4p5dPO3A7IkGlcg/wHKai -eZoBpsCKYGr4F4U0wJTbj6nMAEBVf+BYJgScM8thek3pNc0A0wwwvabymuo+wEiHPE8zwDm+SsAR -0KAc+whIcVonTK/pNb2mzvr/Sc4lJD0JFvgAAAAASUVORK5CYII= diff --git a/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-32x32.png.base64 b/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-32x32.png.base64 deleted file mode 100644 index 598dc9165fe7f0925294b30f193892219c6f0545..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-32x32.png.base64 +++ /dev/null @@ -1,24 +0,0 @@ -iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAABmJLR0QA/wD/AP+gvaeTAAAACXBI -WXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5QIGDS0sz9yJcgAAAB1pVFh0Q29tbWVudAAAAAAAQ3Jl -YXRlZCB3aXRoIEdJTVBkLmUHAAAEtUlEQVRYw8XXf6ifVR0H8Nd5vrvb6Eort7HOvckoytAFAxM0 -ZNpiWX+I/fpDqp27MaEIwjkVtRQjjWUzGIGlaNh2nyhKaOZCilbzB3OMJmVz0Ia2yrWzldW0/WD3 -7vs9/XEfr997d+/d9DJ24OF5zjmf8/m8z+f3w3TGYO5X55unw6IyvTGEg+cSwH+weToMZpwxZZ0r -XKpYLHgNO3QcF3we688ugDrPwNexRyhbCb24TuVVnJiA/j1Yit1S3DE9ABsOzMAAfiXFbV07z6vz -ahwf55hz8GWdzgOq6lp1bktx51vzgcG8RCvchwtHhde5V53f1lD8Fq0xZ4KF2GVF/z6lPIcFb80J -6/wxwWIprkFP184HsRyU8t9TtVj24RJ1vl0In1Q8q84tdZ6nzq0zM8EPXwq4Roqrm5Vj6twrxaNS -3KHOfeq8jvIihsceDrfjYZ3Ofiv6X2suczeO4B342liFTXz7KzBfio8186vxAXxfiu0uukvxYTzS -rCzHS1Lc0uxf35jgT1L8ZcPnuBSfOZ0JzsP/GiafxiK8gLXqfEsX3QH041al3IqhLuEfxWzFo7jI -j3IPLmxyxxQmGLnVUgw2K0ukeFPzvXWsuRG8LMXvTRC2nzFstVWxrc7bFXfid1LcPbkJ6jwL67ps -P5mD9igC5gm+gt3YRrlc8bwQvoBNUnzuzSaiT+AnTQguwyEDcdc44TdhvqCDDv5ueOinqllF6BwU -wrB2e62V7z42eubBHPS6oQnZ9VIsk/nAh5rbMBC3CK5S58u6hH8D26X4VSnegacwbNXCk1qlqKoT -Qiiq6rg6V036pteNeErxBG6eSgPvdOzfR7rmW7BSnS/CxXhcitu79ndjmTrfhX3YgUua9c+io84j -fFP8Y3OJL04FYMjsuTNxwsZ/zMQybKL8VerbMIbykYMtpRTBfine37Wzt3nv6tLcUnX+ZuO2m6cC -8KLKQuxVVVfhSSm+MC499wluUcpDlFcIF6jzCjyNK/AHDOi4x4o4os0Ut6rzM833yakA7GyY7JXi -byZ02+BGpdxpoO+Y+sBcvCyodRDsRxuPqVyP774RsoWBvpNT14IUf4/Fo2obzB/pUuN7m/brXwb6 -Rjy8hJFLtDuzlFLRaUmxM+ondb68ed8rhNvUeZ37c3W6WvA6yicF31HnDt6Pt+MHY3N/mUuYr6o+ 
-NZIHwhqsAW0PaFmrzsuwTYqb1flqcyxpomeSWlDne7XdZWUcaqrXl5CluGlMpRxx0FfwqhQfatY/ -jndJcWMzP08p/UL4nNK+T2itxs+l+OepasGzWq5sTNLG+8YJX4xFUrwNGzC7y4S/xgXqvKiZHzHQ -twc/Flo34Bfdwic2QYqPq/Pd6ny46WQ6p0TKyEMpPUIo4/YfxBp1Poy/SfFnUtyLb515QxLcgyVN -5mNDntkF8KgUjzaE/af2A03/wMOYNeqIb6ojWh6Hpbhex7fxtJY0CdBrJtBQH/ZL8TD+gnlTAQhn -2BWvwvlNqj3UNCdX4glcPEE5fr1nWIA7pDg0PQBvtNqXNeG4BzsVcwTXSfHU/4LBAwucDP+0KhZn -bQzmGeo851z+mp2Pa88lgJ7TOdnZBnCIsnE6DP4PRyPEcw8t2PkAAAAASUVORK5CYII= diff --git a/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-64x64.png.base64 b/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-64x64.png.base64 deleted file mode 100644 index 769950452970288b5c15603ea996c8f56a9bb58f..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyDeepLearning/logo-64x64.png.base64 +++ /dev/null @@ -1,63 +0,0 @@ -iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAABmJLR0QA/wD/AP+gvaeTAAAACXBI -WXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5QIGDS0XftdgVgAAAB1pVFh0Q29tbWVudAAAAAAAQ3Jl -YXRlZCB3aXRoIEdJTVBkLmUHAAANRklEQVR42u2be5BU1Z3HP7/bPQ+eQwiv2z2AoEaECLIkQdeA -UdSovHbXLfMo7p0ZshvMJoba7Jq19o8taytbZcWNKc2uWElWpvuaWleNEFejosQnIbobw4quiEQN -0H1QwSAzMI/uvmf/uL8Zenq6Z9qZgdmKniqq6el7zz2/7/n9vr/XucJojnR2OjATOIifODQaS3BG -FQBkHiJfQuTc0VrBKANACOT080MJAIAA9sMKwAHgKeAgH43RU79R9AJmNsI84DU8963RWEL8lM38 -47eE2rpGYA4wAegAfgccwHe79arZwAqgE/gDAiAwi4DPA1OAQpGmOUAbgdlJu2xX7guxo8aBpwCA -wFwDLNG5DwJ7wR4BqdcdXwBcwni7AMv/RuBUaYlBZjI4nwTGYTmI7+7+/8UBgbkSWAYUsHYzfuLN -ftekMnU4zlcAt1crLNvw3acHmfsCYK1qlKOxw3sU8rfTPDM/+m4wbaapPYfA7WWFB2hKduG5dwDZ -qv1/2rjAnwF5rH0Oa38KvAtMIha/dnTjgFRmLGnzWYQvAt3A43ju4UHvKxQ2A3VV6aFwsc79GH7i -UfzErznO7UAMmM9d2djpByDIOqTNGhznJoSrgI8DOSy/qer+5sZO4LcA2EEhmKjXnUyYrnMt1h4F -QuIy6fSSYKupAb6OMA04AbwOzMfShu+2lRBXDJwvAzV47l0lMx1SYhx4WN5EmImwGNinZvFxhI9h -yeG7R04vADG+DkwHXqNQuAdHYoizEKGj37VeskBg5gB1BNmz8BL7TgpmjyMyuPqK3Q2yAlhEYJJA -u6bRAjxxet1g2lwNTAX24rmtRSwdB8ZXuOs+oBnEJ23ux3dfUsm6Bn3e3SaGpVk5IAQaIzLEAX6O -7z5btIbPAAuVXF/Bc381sgCkD9YrIXViw38v+dUADQRmOp77dl8tcF8lMFuAtQjXEpjVwF6snQxi -B/QGlg3AWKANz72FVGYSIhPwEwdK3KQPnKNzCTCHwCwhxybWu+HIkKDElulOPI+f7Cz59RkF9NKy -90a7cRuQAeqBRYjMHLAWEAGVACwhd6gbPdpf+OyFwHzgGJZ7sLYVeAdIUMPnR9IEzgMEy+4yAr5A -YK4GziMwb+G5O8tccwjYRDo7ESEJsgBYgpQBITALgQv12w9oco+XmOIyhPm649N01/8NX11wKnMA -x/lH4DzSmUfxk3YkAJgaRWL5bEltbzwi64EaXcgaAjMX2O7ku98JW2b3FdBPHAOOkTZTECxwoQp8 -UvFhRgS2TeEn3i4BxwWuBCyWMQidUfQZdhYFXJ0EpjuS0Ylr5WnYAET25c8sFO3ERIRvAbXA77D8 -N8IyjfkXhPHadgKzB899oEIgboHJ6lUs1oJIz9oexE/sKSFFB8s6NZ3dwDYsqxDmI85y4Oe6rgs1 -UDqM7+aGbwKBuULjcGjNjqU5cUKF+DIQw9pn8BOP6tW/Jm2WIyxVG357ADhjWJ5A7PNY4jqXBTrw -9Rl977kcaAC68dx7dW1bgXmqSYsV2jq9fuvwOSAwTcDZev3LiM33qj7MAmJFwquau88oMUIqWzdI -qNuNl2irYh0NwHIgjmVTEbe0E5hbga8CPVHhe1gCfPed4QEQmKvUvZwg5E6aiuP8sIPQ+S5iCwPO -0ZToGlD86ssBTWr3z+K7mRKCfQ+4mdZsA46E/SLSIaXDgZkO/DXQSRjeQlPy+AiXxJZpHtGuJLUd -z30xAt2ej7X34Sf3kc5ei8hs3d0jeO6tp6sgsjqq2LBlWMIHZgrwGWAmlhjCMeAFLAVlgnoNdmr1 -jjEgDepVQGSsltUKwD1VPfPOg8J1jXboAKSy41T138V3/6dk5/4B2IVlG01u5yDCrwGWKiM7SG9B -YwFCuxLVM9jweSAylZBHkHA7oRzXnOFerI0DhQE3ImXqcFgDzAWEwOwHHsCrvMb4AMaxRKO+l8v8 -lgDqEBaTynyfpuSx/jH8fgdb81WN3XNYtiLsBlsAmQN8qXfHhW785EmbbXI7tIjaEzecqELLaoGN -mjqPUc0aD5zBZnMLLeVdYeVQWORs/X1f/2IG1wM7gRocp7nf75tNDFtzvbrA3yPyHXz3v/DcTrxE -Ds/dS8h3iyKB3AiY85Uq/BuEhRvx3L/VmuM4YnbVUDhgOpAntP13t9nNAQ8RmDOipCPTgJd8X9Vw -Ig4bdDGdILexbkb/ml2T20HaPImwAmEVgVnbuyZr78VPvEBgWoCF2HATfnIv6ex1iJxRgchzyiUB -TY09gG7FsgCRc4YCwHggh9iBbPydCCjnBgLzhs43V8nqMKFsomlGfgAzy6mWdav9i1aJe9xqB3Ac -nJ45uqLvOGUyyLgmTVIUNFkcQsAhbQTftUMJhJxBtER0YWfqoo5g+SW+u2PwwFosYmNYnqJw4lny -jkO8RnDykcC57vuI2Z/SGUbfC/kAJxQKcYjFVNAQjrblmdzwJ8ASHL5AkNmKFRAuVxn3lRN+MADa 
-gHHgjAXeK0l+piKyVAE4QKHQSswZjwX8xMDlqc2HammZoZ0h25MkhbScWegNtXvG+tl9/9YyKz8A -CT4MfAI4F5yzNbyqAY5RCH82FBMwwCcQPkZp91bkxsi+6SYkRXNjV68Lqxz0fBLhSghfB3RBtqCZ -T5xUtp68zfOVZJ5UphaIE9JFS7JAKlMHOBSsZX1jZ4V6Qxd3m+8RsgrhXNXMlyjwnzQnuz84AJY3 -EM7SWGB3Ge3YRYGHlRAHi/hWIFymav9WH38CeYRLEFlBrTwE7MRxVgNLcexm4FUcZx0wB4cYaZPC -d/eUfc46Nwds0X/DLYuHv1FWPYfWbLwE7Zvw3K1VCn8WwhVAB5Y78d37i6udulNhlM8rsUWfhSKi -y+v/cwjXcJeJMUKjsgb4yTYCsw+YTUw+BfxqSE8QrgW6sfaH/Q5CibUgcSw/wy8qYPruA8ADRYCn -1M6/BjRSw58C91cAfAxiLV6icyQaIw/qDq0mna0vIsHJpE1DFbs/V+OBbIVTYFZBqq02q1Bt+RSB -mdUv3wjMRoTvgPwTgdlIYBLDA6Agh4AjQC1CrIgEvw3cQKsZU6ZVNpHAXE1g/gahSQOU31dwgz3n -g8aSzk4nnR2rADcQZF3uNlJieu1YnlaTaOJfD4kKPwH4pnqlt4n6jjOAvyJlpg0dACd0NF5vw/ap -4b+MADFWluz4chznBuCzRN3fbi1MtFdg2pgKcxEi30RkkQJ8GchGLEv7m6b7hHqoOibaPydtphN1 -jWuAp8iFt9DVfSuWB4E4DmuHng6LyMmCRZ844n7gRuB8ApPA2icRORP4tFLYdmAH/gCZYtpMBTtd -J25Xt9qTAL2vJLiaVGZ3vwww5Cc43AAsRFikZhFi2cn63urvTgKzCphGKltLU6L7gwMQhgUcpxth -MlacIlXsJDA3A+uBJCJfUEHeB36A73ZUSFfrNV1drAj3BDk78NzniuZ/gsCMAz6tZwluL8kjjhJk -t4Gcr8+dpNlpnbpoCEyNakWesHLFavADEoHZAMzFsgm/zEGmwFwMXAaECDezroLwgZkNbNDdymse -UQ9MwfJYvwMSgYkB3wbGYe12/MSTlbUpexEiKyO+sluw5BBZq2a4E899aOidIcsrQAHhggpXjFMg -H68ofERE1+luPaZxxB1YntNCSbnIrqCu0EHkClIDeB0/sYOoQz0FZAMi3wCSwH7yWiYfMgBiX1Qm -P58gU64PvxiopSv85QBPWafCP4RX1MyU3udLBRBeA3YBORxaBlyn524G/kOB+C2whUP8kBY3HF5V -2EucIDDbgKvA2UBgvofn5nVnG3q9xF8kyz8oyNQquR0u2y4bbORy91FTMw+YQmAux9q9IGMJ2U9z -SbvMc3cpYFWP6pqjnrtDK0PjgW+p64kqfCdD1Qr3Jrvx3E1Y+/3+5mXrGeyc0PpZFkipqSxH5C8R -1hHj77UXOaxRfXc4OgvwOtCAsJHAfA2rbu9kM4IB7NSWcbPT+qXA5cYxOaDkKUSHo/boc/+YtLn0 -9AAQgZDWAKM7cn9crDuTI23mfeCnW86qyhdNtJcqAC/iubfhuQHYf1Yd/NzpAyCKxJ7Hc2/C0orl -EWA/0TGnlR+wV/BHSO/hp8Hq92doQPZKETcdBY4CQjo77fQBcBKIffjuDjz3Rxp8TCIw1Z3ZS5tZ -wDWqSdWMNtWUqaW6EQFj209VZ6jasQn4O2AhgZlEGG6hKflu/3LYgRjx+MVEp0hEg6Hk4KZif4HI -EuByAjNeS3QXKQ/soSl5YqgLH7mjsqnMJBzneqK2tAUOK2ke0781asU4psD/BMsEhDVYHqniqOwC -wNeYRFR7M3juvwxn2SP/vkBgVmo2GNL3/I+o4LtUQ7q0OboSax/BTzxdxdwTsPYCkBrgTXz31VNX -ERrq8NyHgYdJZxeAzADGIfYE8C7vy0t8o095WjTUrXbuNuDxkVzuqXthwk+8AkWsXX4cAJ7Uz4/G -aIzRfmdojtbw9+C5b4zGEkb7tbmZwCVE54z4MAJQWv//0AHQ489HzRTjowzAPrBbGaVX5j4awP8B -/cDmGnd36/sAAAAASUVORK5CYII= diff --git a/Golden_Repo/j/JupyterKernel-PyParaView/JupyterKernel-PyParaView-5.8.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-PyParaView/JupyterKernel-PyParaView-5.8.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index 02ad2914a3351aa6ab1ef393d1bc817069fdb2e4..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyParaView/JupyterKernel-PyParaView-5.8.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,130 +0,0 @@ -easyblock = 'Binary' - -name = 'JupyterKernel-PyParaView' -version = '5.8.1' -local_paraviewver = '5.8.1' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://www.paraview.org' -description = """ -Special ParaView kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -sources = [ - ('logo-128x128.png'), - ('logo-32x32.png'), - ('logo-64x64.png'), -] - -builddependencies = [ - ('binutils', '2.34'), - # ('ParaView', local_paraviewver, '-EGL' + local_pysuffix, ('gpsmkl', '2020')), # ensure it is available -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_kernel_dir = 'pyparaview' -local_kernel_name = 'PyParaView-%s' % local_paraviewver - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - ( - '{ cat >> %%(builddir)s/env.sh; } << \'EOF\'\n' - 'export KERNEL_DIR=%s\n' - 'export KERNEL_NAME=%s\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_name), - 'source %%(builddir)s/env.sh && python -m ipykernel install --name=%s --prefix=%%(installdir)s' % local_kernel_dir, - - # write logo image - ( - 'source %%(builddir)s/env.sh && ' - ' cp %%(builddir)s/logo-32x32.png %%(installdir)s/share/jupyter/kernels/%s/logo-32x32.png' - ) % (local_kernel_dir), - ( - 'source %%(builddir)s/env.sh && ' - ' cp %%(builddir)s/logo-64x64.png %%(installdir)s/share/jupyter/kernels/%s/logo-64x64.png' - ) % (local_kernel_dir), - ( - 'source %%(builddir)s/env.sh && ' - ' cp %%(builddir)s/logo-128x128.png %%(installdir)s/share/jupyter/kernels/%s/logo-128x128.png' - ) % (local_kernel_dir), - - # write kernel.sh - ( - '{ source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << \'EOF\'\n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use $OTHERSTAGES \n' - 'module load Stages/2020 \n' - 'module load GCC/9.3.0 \n' - 'module load ParaStationMPI \n' - 'module load Python/%%(pyver)s \n' - 'module load Jupyter/%s%s \n' - '\n' - 'module load ParaView/%s-EGL%s \n' - 'module unload VTK \n' - '\n' - 'exec python -m ipykernel $@\n' - 'EOF' - ) % (local_kernel_dir, - local_jupyterver, local_pysuffix, - local_paraviewver, local_pysuffix), - 'source %(builddir)s/env.sh && chmod +x %(installdir)s/share/jupyter/kernels/${KERNEL_DIR}/kernel.sh', - - # write kernel.json - ( - '{ source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "-m", \n' - ' "ipykernel_launcher", \n' - ' "-f", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "language": "python", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_name, local_kernel_name), -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'share/jupyter/kernels/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-PyParaView/JupyterKernel-PyParaView-5.8.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-PyParaView/JupyterKernel-PyParaView-5.8.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file 
mode 100644 index 4c2d4960cf73a2d1b61447270f83976056828b09..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyParaView/JupyterKernel-PyParaView-5.8.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,134 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'JupyterKernel-PyParaView' -version = '5.8.1' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://www.paraview.org' -description = """ -Special ParaView kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - # ('ParaView', version, '-EGL' + local_pysuffix, ('gpsmkl', '2020')), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -components = [ - ('logos', '1.0', { - 'easyblock': 'Binary', - 'sources': [ - {'filename': 'logo-32x32.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-32x32.png"}, - {'filename': 'logo-64x64.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-64x64.png"}, - {'filename': 'logo-128x128.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-128x128.png"}, - ], - }), -] - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': True, - 'download_dep_fail': True, - 'use_pip_for_deps': False, -} - -exts_list = [ -] - -local_kernel_dir = 'pyparaview' -local_kernel_name = 'PyParaView-%s' % version - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # create kernel skeleton - ( - 'python -m ipykernel install --name=%s --prefix=%%(installdir)s && ' - 'mv %%(installdir)s/logo-32x32.png %%(installdir)s/share/jupyter/kernels/%s/logo-32x32.png && ' - 'mv %%(installdir)s/logo-64x64.png %%(installdir)s/share/jupyter/kernels/%s/logo-64x64.png && ' - 'mv %%(installdir)s/logo-128x128.png %%(installdir)s/share/jupyter/kernels/%s/logo-128x128.png' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_dir, local_kernel_dir), - - # write kernel.sh - ( - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << EOF\n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use \$OTHERSTAGES \n' - 'module load Stages/${STAGE} \n' - 'module load GCC/9.3.0 \n' - 'module load ParaStationMPI \n' - 'module load %s/.%s%s \n' - '\n' - 'module load ParaView/%s-EGL%s \n' - 'module unload VTK \n' - '\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:\$PYTHONPATH \n' - 'exec python -m ipykernel \$@\n' - '\n' - 'EOF' - ) % (local_kernel_dir, name, version, versionsuffix, - version, local_pysuffix), - 'chmod +x %%(installdir)s/share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - - # write kernel.json - ( - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "-m", \n' - ' 
"ipykernel_launcher", \n' - ' "-f", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "language": "python", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_name, local_kernel_name), -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'share/jupyter/kernels/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-PyParaView/logo-128x128.png.base64 b/Golden_Repo/j/JupyterKernel-PyParaView/logo-128x128.png.base64 deleted file mode 100644 index eb55970852c1f47ac4e00c4eb99e1062ac942be8..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyParaView/logo-128x128.png.base64 +++ /dev/null @@ -1,229 +0,0 @@ -iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAIAAABMXPacAAAACXBIWXMAAC4jAAAuIwF4pT92AAAA -B3RJTUUH5AQCEAgKKqJdGQAAAB1pVFh0Q29tbWVudAAAAAAAQ3JlYXRlZCB3aXRoIEdJTVBkLmUH -AAAgAElEQVR42u29Z5Rd13UmuPc+5973KqKAKsRCFVIhRyJngCTEIMuWljXukS1pJLdlyZZ7eVn2 -uMczy251tyW3renxyBqLLVmeNXTbbXnUa0maHpEKzCQyQOScUwEVX7180zl7z4/7Ur0CQJAE0Wta -dRZZKBRe3XvP/s7Z8dvnoojA+PivN2hcBOMAjAMwPsYBGAdgfIwDMA7A+BgHYByA8TEOwDgA42Mc -gHEAxsc4AOMAjI9xAMYBGB/jAIwDMD7GARgHYHyMAzAOwPh4+EN/QNe11lpmfP8XQhRmItK6/lGN -Me+f1ISIzIyIjuPc7foMgA/lFkoppdQjAuDw4cNf+MIXRnIFx3UACRAAEBDjuSDGf8RPBwggpT/r -hwBoY5qSyW9/+9urVq2q/acXX3zxD//wX0aB0UoTImD5/5pLlW5UI0KsXjh+JjEk5Kqv/cXXdu3a -VXv9o0ePfv7zv5kdSbmuS4Tx1UozwPh3KzOo3q/u4eMRMrqJ5HPPPbdu3bpHBMDPXnrpyJEjn9ix -NsE2KhYlshAZsVasBWPZMAuDEbEMLCAoDKW1jCAiMT4CkiT6aSad7OycNnVq3S1+8P0fnDl3+mOf -eKIQeZ4NAxOFbCNrI7ZWrBURYSvMAhYEBK0AIBgAkJJoBEA05Y/3dkStc+fOrbv+T37848OH3173 -9MetkFcMwtBGxoaWjRWxzMyGwYgYFgvCIvEygngWIiASryxF2l49NLOzbcaMGY9OBf3spVc2LJ77 -/Jc+4/X1ecMjnCuYQpELHvuRFH0TBBxaG1gJWEIBI2KBRUBABIBZykso4dKxTHrZzp3Tpk+vvX4Y -hm+8/tazH93+le/87oXbN4aKuUxQyITFvO8XbOCZIDA2kjC0JhSJGAxDxCCIoYAVYBAQEESTdG78 -mbeqYdlYAPbsfmv2ouX//E/+um8wPZzKpbNBvhjk/SgIoiCIQmN8K55lz5rQimGORKwwMIsAxvMh -BAGN2g5c2bJpbWdn5yMCoL+//+0jb3/+qY3iFUdS6ShftIWiLXrWD8Qz1gvZRBwyR1YiFitiARhE -WASFAYQRkAGSABeLwUWA39mypbqvRRDx7Jmzl69c+sRv7UplMiPpXMYv5COvEPieCXwTBCaKrAlt -GFqORAyLEWBBC8AxxiAsIAmNvYXg7MDTf/x03RTu3Lmzd8+eTR/7TNEP06lUPut7hcD3otAPTRiZ -yIbGRszWWGYGyyCMIuVv4ikAADA5unA7KPQ+set/eXRGeN+Bg/lMesO8Lm8kE+U9WwhMMeTAWD/i -IGRjbCQQltaMWAABEREBsbGES7rUITrh5XVjw+aNG+sAeOut3YJ2werugUw6F/r5yC+EYdGEngn9 -MIrERNZElk2ML5dWfQyDAIAggJDCYm8Oi7h+4/q6KZw8cWIkm+tauDKTLRbyQdEPikHkhWEYGWNj -RceRsZG1lsUwGxEWRmEBQGEBYQAUIFI8fDXp0Jq16x4dAK+8+urkCU1LOtoyV/r8/hQXfev57Pkc -RBwxR1aMgBWxsaqMNU/5a2w5BQiRAQ/nc93z5q1asaLWowCAV15+pWdht2p3zg3f6c9lMmHRj0I/ -CkI2hk1k2YAYBitgBQSRAYxIyTiLCCIhgqLiqYE5M2evXP1Y3RR+/OILja2T2mcuuHVn6PZILlsI -PD+0kQ0i6xsOrI1EImtLNoYZRKC08CvrB5FII5o7Z5csnL9s2fJHBEAYhnv37V/RPb0dYLgvbdN5 -m/dNIbCBAcNsWLgi71ji8RMLAGKsHpCQQAtmFRzPF35p8xalFDMTUQzA8PDwoYOH1z3Vk1fBrdxI -yssXQi+wUWhNxMIChsUCioAFYUBhQAEGjG+rCBABEdhI5vTA1seemDBhQu0UmPnw4UMz5izgxISB -G7eHsp7nhWFgfGN9a0NrIxEWEeZ438bmiwAIkGMLjICIqDRaPxq4vOZDH00kEpUpfLCB2PXr1y9c -uLChZxal81GmYLNBOOKZbMAFY33LkWVjmUVYmBnKcoeyAUaKNwA4iL0mHAR4Ytu2WocSAE6fOj04 -2D93xewBLzviF3JBIRt42TDKRZI3kjPgMQQsAYOxaC2wiJF4t2G8OpUwOcRDXngr+6Gnn6qbwsWL -F8+cOTNn+YaCb1P5oueHnh+lgzAVhrko8iJjjLHGoLVgLVhGERIQQRZBAQQBJCK0ytH5fgjTOx9/ -sm4KH+AO2L1nTz47smNeV75/xOaLUcazRSPGCogIS8lLj1d67A5K2SkvedpIAIAu4sFstqm9ff26 
[base64-encoded PNG image data omitted]
diff --git a/Golden_Repo/j/JupyterKernel-PyParaView/logo-32x32.png.base64 b/Golden_Repo/j/JupyterKernel-PyParaView/logo-32x32.png.base64
deleted file mode 100644
index 3d6f3a3f2a8931f26edf0d2a36ad63722254eef6..0000000000000000000000000000000000000000
--- a/Golden_Repo/j/JupyterKernel-PyParaView/logo-32x32.png.base64
+++ /dev/null
@@ -1,30 +0,0 @@
[30 lines of base64-encoded PNG image data omitted]
diff --git a/Golden_Repo/j/JupyterKernel-PyParaView/logo-64x64.png.base64 b/Golden_Repo/j/JupyterKernel-PyParaView/logo-64x64.png.base64
deleted file mode 100644
index 3803d73352011e8d0bad7eaf26d19920a1e09ea7..0000000000000000000000000000000000000000
--- a/Golden_Repo/j/JupyterKernel-PyParaView/logo-64x64.png.base64
+++ /dev/null
@@ -1,79 +0,0 @@
[79 lines of base64-encoded PNG image data omitted]
diff --git a/Golden_Repo/j/JupyterKernel-PyQuantum/JupyterKernel-PyQuantum-1.1-2020.2.5-gcccoremkl-9.3.0-2020.2.254.eb b/Golden_Repo/j/JupyterKernel-PyQuantum/JupyterKernel-PyQuantum-1.1-2020.2.5-gcccoremkl-9.3.0-2020.2.254.eb
deleted file mode 100644
index 
e4e966a70fe9e0bc9ad5809c6bfddbcb32946f59..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyQuantum/JupyterKernel-PyQuantum-1.1-2020.2.5-gcccoremkl-9.3.0-2020.2.254.eb +++ /dev/null @@ -1,141 +0,0 @@ -easyblock = 'Binary' - -name = 'JupyterKernel-PyQuantum' -version = '1.1' -local_cirqver = '0.9.1' -local_dwavever = '3.2.0' -local_pyquilver = '2.27.0' -local_qiskitver = '0.23.2' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://www.fz-juelich.de' -description = """ -Kernel for quantum computing in Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services for interactive computing across -dozens of programming languages. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -sources = [ - ('logo-128x128.png'), - ('logo-32x32.png'), - ('logo-64x64.png'), -] - -builddependencies = [ - ('Cirq', local_cirqver, local_pysuffix), # ensure it is available - ('DWave', local_dwavever, local_pysuffix), # ensure it is available - ('PyQuil', local_pyquilver, local_pysuffix), # ensure it is available - # ('Qiskit', local_qiskitver, local_pysuffix, ('gpsmkl', '2020')), # ensure it is available -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_kernel_dir = 'pyquantum' -local_kernel_name = 'PyQuantum-%s' % version - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - ( - '{ cat >> %%(builddir)s/env.sh; } << \'EOF\'\n' - 'export KERNEL_DIR=%s\n' - 'export KERNEL_NAME=%s\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_name), - 'source %%(builddir)s/env.sh && python -m ipykernel install --name=%s --prefix=%%(installdir)s' % local_kernel_dir, - - # write logo image - ( - 'source %%(builddir)s/env.sh && ' - ' cp %%(builddir)s/logo-32x32.png %%(installdir)s/share/jupyter/kernels/%s/logo-32x32.png' - ) % (local_kernel_dir), - ( - 'source %%(builddir)s/env.sh && ' - ' cp %%(builddir)s/logo-64x64.png %%(installdir)s/share/jupyter/kernels/%s/logo-64x64.png' - ) % (local_kernel_dir), - ( - 'source %%(builddir)s/env.sh && ' - ' cp %%(builddir)s/logo-128x128.png %%(installdir)s/share/jupyter/kernels/%s/logo-128x128.png' - ) % (local_kernel_dir), - - # write kernel.sh - ( - '{ source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << \'EOF\'\n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use $OTHERSTAGES \n' - 'module load Stages/2020 \n' - 'module load GCC/9.3.0 \n' - 'module load ParaStationMPI \n' - 'module load Python/%%(pyver)s \n' - 'module load Jupyter/%s%s \n' - '\n' - 'module load Cirq/%s%s \n' - 'module load DWave/%s%s \n' - 'module load PyQuil/%s%s \n' - 'module load Qiskit/%s%s \n' - '\n' - 'exec python -m ipykernel $@\n' - 'EOF' - ) % (local_kernel_dir, - local_jupyterver, local_pysuffix, - local_cirqver, local_pysuffix, - local_dwavever, local_pysuffix, - local_pyquilver, local_pysuffix, - local_qiskitver, local_pysuffix), - 'source %(builddir)s/env.sh && chmod +x %(installdir)s/share/jupyter/kernels/${KERNEL_DIR}/kernel.sh', - - # write kernel.json - ( - '{ 
source %%(builddir)s/env.sh && ' - ' cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "-m", \n' - ' "ipykernel_launcher", \n' - ' "-f", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "language": "python", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_name, local_kernel_name), -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'share/jupyter/kernels/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-PyQuantum/JupyterKernel-PyQuantum-1.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-PyQuantum/JupyterKernel-PyQuantum-1.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 9f1707ccc4011ca047fec9d27b8a2468f2e66d94..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-PyQuantum/JupyterKernel-PyQuantum-1.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,135 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'JupyterKernel-PyQuantum' -version = '1.1' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-%(pyver)s' - -homepage = 'https://www.fz-juelich.de' -description = """ -Kernel for quantum computing in Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services for interactive computing across -dozens of programming languages. 
-""" -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('Cirq', '0.9.1', local_pysuffix), - ('DWave', '3.2.0', local_pysuffix), - ('PyQuil', '2.27.0', local_pysuffix), - # ('Qiskit', '0.23.5', local_pysuffix, ('gpsmkl', '2020')), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -components = [ - ('logos', '1.0', { - 'easyblock': 'Binary', - 'sources': [ - {'filename': 'logo-32x32.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-32x32.png"}, - {'filename': 'logo-64x64.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-64x64.png"}, - {'filename': 'logo-128x128.png.base64', 'extract_cmd': "base64 -d %s > %%(builddir)s/logo-128x128.png"}, - ], - }), -] - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': True, - 'download_dep_fail': True, - 'use_pip_for_deps': False, -} - -# additional Python packages -exts_list = [ -] - -local_kernel_dir = 'pyquantum' -local_kernel_name = 'PyQuantum-%s' % version - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # create kernel skeleton - ( - 'python -m ipykernel install --name=%s --prefix=%%(installdir)s && ' - 'mv %%(installdir)s/logo-32x32.png %%(installdir)s/share/jupyter/kernels/%s/logo-32x32.png && ' - 'mv %%(installdir)s/logo-64x64.png %%(installdir)s/share/jupyter/kernels/%s/logo-64x64.png && ' - 'mv %%(installdir)s/logo-128x128.png %%(installdir)s/share/jupyter/kernels/%s/logo-128x128.png' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_dir, local_kernel_dir), - - # write kernel.sh - ( - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << EOF \n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use \$OTHERSTAGES \n' - 'module load Stages/${STAGE} \n' - 'module load GCC/9.3.0 \n' - 'module load ParaStationMPI \n' - 'module load Qiskit/0.23.5-Python-3.8.5 \n' - 'module load %s/.%s%s \n' - '\n' - 'export PYTHONPATH=%%(installdir)s/lib/python%%(pyshortver)s/site-packages:\$PYTHONPATH \n' - 'exec python -m ipykernel \$@\n' - '\n' - 'EOF' - ) % (local_kernel_dir, name, version, versionsuffix), - 'chmod +x %%(installdir)s/share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - - # write kernel.json - ( - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "-m", \n' - ' "ipykernel_launcher", \n' - ' "-f", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "language": "python", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_name, local_kernel_name), -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'share/jupyter/kernels/', - ], -} - 
-moduleclass = 'tools'
diff --git a/Golden_Repo/j/JupyterKernel-PyQuantum/logo-128x128.png.base64 b/Golden_Repo/j/JupyterKernel-PyQuantum/logo-128x128.png.base64
deleted file mode 100644
index 4a8f9967247763e6e53e234ef9c9d37638c72cab..0000000000000000000000000000000000000000
--- a/Golden_Repo/j/JupyterKernel-PyQuantum/logo-128x128.png.base64
+++ /dev/null
@@ -1,217 +0,0 @@
[217 lines of base64-encoded PNG image data omitted]
diff --git a/Golden_Repo/j/JupyterKernel-PyQuantum/logo-32x32.png.base64 b/Golden_Repo/j/JupyterKernel-PyQuantum/logo-32x32.png.base64
deleted file mode 100644
index 83d4dafe09db57bfbe013f45ae19f261b98dc6ce..0000000000000000000000000000000000000000
--- a/Golden_Repo/j/JupyterKernel-PyQuantum/logo-32x32.png.base64
+++ /dev/null
@@ -1,35 +0,0 @@
[35 lines of base64-encoded PNG image data omitted]
diff --git a/Golden_Repo/j/JupyterKernel-PyQuantum/logo-64x64.png.base64 b/Golden_Repo/j/JupyterKernel-PyQuantum/logo-64x64.png.base64
deleted file mode 100644
index cbb1ae369d4c71500d7fad8e5565d6b8d25401d9..0000000000000000000000000000000000000000
--- a/Golden_Repo/j/JupyterKernel-PyQuantum/logo-64x64.png.base64
+++ /dev/null
@@ -1,87 +0,0 @@
[87 lines of base64-encoded PNG image data omitted]
diff --git a/Golden_Repo/j/JupyterKernel-R/JupyterKernel-R-4.0.2-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-R/JupyterKernel-R-4.0.2-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb
deleted file mode 100644
index ee02c803106fcea641d1bd70912d2ad3052aa32e..0000000000000000000000000000000000000000
--- a/Golden_Repo/j/JupyterKernel-R/JupyterKernel-R-4.0.2-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb
+++ /dev/null
@@ -1,61 +0,0 @@
-easyblock = 'Bundle'
-
-name = 'JupyterKernel-R'
-version = '4.0.2'
-local_jupyterver = '2020.2.5'
-versionsuffix = '-' + local_jupyterver
-
-local_pysuffix = '-Python-3.8.5'
-
-homepage = 'https://github.com/IRkernel/IRkernel'
-description 
= """ -Native R kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Jupyter', local_jupyterver, local_pysuffix), - ('R', '4.0.2', '-nompi'), -] - -modextrapaths = { - 'JUPYTER_PATH': 'share/jupyter', # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # Jupyter Kernel: R - https://github.com/IRkernel/IRkernel - # installs R kernel in $EBROOTJUPYTER/share/jupyter/kernels - 'R -e \'IRkernel::installspec(name="ir40", displayname="R 4.0", prefix="%(installdir)s")\'', - - # force options(bitmapType='cairo') -> https://github.com/IRkernel/IRkernel/issues/388 - ( - 'sed -i "s#IRkernel::main()#options(bitmapType=\'cairo\') ; IRkernel::main()#g" ' - ' %(installdir)s/share/jupyter/kernels/ir40/kernel.json' - ), -] - -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/ir40/kernel.json', - ], - 'dirs': [ - 'share/jupyter/kernels/ir40/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-R/JupyterKernel-R-4.0.2-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-R/JupyterKernel-R-4.0.2-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index fd59da64e06d6990887caf446fdfbee746cb2d61..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-R/JupyterKernel-R-4.0.2-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,113 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-R' -version = '4.0.2' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-3.8.5' - -homepage = 'https://github.com/IRkernel/IRkernel' -description = """ -Native R kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('R', version, '-nompi'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), -] - -local_kernel_dir = 'ir40' -local_kernel_name = 'R-%s' % version - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - # create kernel skeleton - ( - 'module purge && ' - 'module use $OTHERSTAGES && ' - 'module load Stages/${STAGE} && ' - 'module load GCCcore/.9.3.0 && ' - 'module load Jupyter/%s%s && ' - 'module load R/%s-nompi && ' - 'R -e \'IRkernel::installspec(name="%s", displayname="%s", prefix="%%(installdir)s")\' && ' - # force options(bitmapType='cairo') -> https://github.com/IRkernel/IRkernel/issues/388 - 'sed -i "s#IRkernel::main()#options(bitmapType=\'cairo\') ; IRkernel::main()#g" ' - ' %%(installdir)s/share/jupyter/kernels/%s/kernel.json' - ) % (local_jupyterver, local_pysuffix, version, - local_kernel_dir, local_kernel_name, local_kernel_dir), - - # write kernel.sh - ( - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.sh; } << EOF \n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use \$OTHERSTAGES \n' - 'module load Stages/\${STAGE} \n' - 'module load GCCcore/.9.3.0 \n' - 'module load R/%s-nompi \n' - '\n' - 'exec \${EBROOTR}/lib64/R/bin/R "\$@"\n' - '\n' - 'EOF' - ) % (local_kernel_dir, version), - 'chmod +x %%(installdir)s/share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - - # write kernel.json - ( - 'cp %%(installdir)s/share/jupyter/kernels/%s/kernel.json ' - ' %%(installdir)s/share/jupyter/kernels/%s/kernel.json.orig && ' - '{ cat > %%(installdir)s/share/jupyter/kernels/%s/kernel.json; } << \'EOF\'\n' - '{ \n' - ' "argv": [ \n' - ' "%%(installdir)s/share/jupyter/kernels/%s/kernel.sh", \n' - ' "--slave", \n' - ' "-e", \n' - ' "options(bitmapType=\'cairo\') ; IRkernel::main()", \n' - ' "--args", \n' - ' "{connection_file}" \n' - ' ], \n' - ' "display_name": "%s", \n' - ' "language": "R", \n' - ' "name": "%s" \n' - '}\n' - 'EOF' - ) % (local_kernel_dir, local_kernel_dir, local_kernel_dir, local_kernel_dir, - local_kernel_name, local_kernel_dir), -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/%s/kernel.sh' % local_kernel_dir, - 'share/jupyter/kernels/%s/kernel.json' % local_kernel_dir, - ], - 'dirs': [ - 'share/jupyter/kernels/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Ruby/JupyterKernel-Ruby-2.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterKernel-Ruby/JupyterKernel-Ruby-2.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index a88416c91842db501138d1430faf784f9e915615..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Ruby/JupyterKernel-Ruby-2.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,65 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Ruby' -version = '2.7.1' -local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = 
'-Python-3.8.5' - -homepage = 'https://github.com/SciRuby/iruby' -description = """ -Native Ruby kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - ('Ruby', '2.7.1'), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - 'echo "#!/bin/bash" > %(builddir)s/env.sh', - 'echo "export JUPYTER_DATA_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_path, - - # install Ruby kernel in $JUPYTHER_PATH - 'source %(builddir)s/env.sh && iruby register --force ', - - # ensure correct permissions - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/ruby/kernel.json', - ], - 'dirs': [ - 'share/jupyter/kernels/ruby/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterKernel-Ruby/JupyterKernel-Ruby-2.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterKernel-Ruby/JupyterKernel-Ruby-2.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 0bacdd8cc6582a6ef13de9d9f70aa3b88335ec9f..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterKernel-Ruby/JupyterKernel-Ruby-2.7.1-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,65 +0,0 @@ -easyblock = 'Bundle' - -name = 'JupyterKernel-Ruby' -version = '2.7.1' -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -local_pysuffix = '-Python-3.8.5' - -homepage = 'https://github.com/SciRuby/iruby' -description = """ -Native Ruby kernel for Jupyter. -Project Jupyter exists to develop open-source software, open-standards, and services -for interactive computing across dozens of programming languages. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, local_pysuffix), - ('Ruby', '2.7.1'), -] - -local_jupyter_path = 'share/jupyter' - -modextrapaths = { - 'JUPYTER_PATH': ['share/jupyter'], # add search path for kernelspecs -} - -# Ensure that the user-specific $HOME/.local/share/jupyter is always first entry in JUPYTHER_PATH -modluafooter = """ -prepend_path("JUPYTER_PATH", pathJoin(os.getenv("HOME"), ".local/share/jupyter")) -""" - -postinstallcmds = [ - 'echo "#!/bin/bash" > %(builddir)s/env.sh', - 'echo "export JUPYTER_DATA_DIR=%%(installdir)s/%s" >> %%(builddir)s/env.sh' % local_jupyter_path, - - # install Ruby kernel in $JUPYTHER_PATH - 'source %(builddir)s/env.sh && iruby register --force ', - - # ensure correct permissions - 'source %(builddir)s/env.sh && chmod -R o+x %(installdir)s/share', -] - -# specify that Bundle easyblock should run a full sanity check, rather than just trying to load the module -# full_sanity_check = True -sanity_check_paths = { - 'files': [ - 'share/jupyter/kernels/ruby/kernel.json', - ], - 'dirs': [ - 'share/jupyter/kernels/ruby/', - ], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterProxy-Matlab/JupyterProxy-Matlab-0.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterProxy-Matlab/JupyterProxy-Matlab-0.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 9ff94876492897b9c64d0d1a12145d11c6339ce3..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterProxy-Matlab/JupyterProxy-Matlab-0.1.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'JupyterProxy-Matlab' -version = '0.1.0' - -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -homepage = '' -description = """ -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, '-Python-%(pyver)s'), -] - -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, -} - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_list = [ - ('jupyter-matlab-proxy', '0.1.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://github.com/mathworks/jupyter-matlab-proxy/archive/']), - ('source_tmpl', '%(version)s.tar.gz'), - ('checksums', [('sha256', '4be1f317681cb1ec5e24ee0bad82e4c8317026edb2b29c4bb4176173cd7eb623')]), - ])), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterProxy-XpraHTML5/JupyterProxy-XpraHTML5-0.3.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb b/Golden_Repo/j/JupyterProxy-XpraHTML5/JupyterProxy-XpraHTML5-0.3.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb deleted file mode 100644 index 8fde03a92d49c001e80d348d9bd7c8bc68a240b3..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterProxy-XpraHTML5/JupyterProxy-XpraHTML5-0.3.0-gcccoremkl-9.3.0-2020.2.254-2020.2.5.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'JupyterProxy-XpraHTML5' -version = '0.3.0' - 
-local_jupyterver = '2020.2.5' -versionsuffix = '-' + local_jupyterver - -homepage = 'https://xpra.org' -description = """ -Jupyter proxy for Xpra HTML5 sessions. -Xpra is an open-source multi-platform persistent remote display server and client -for forwarding applications and desktop screens. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, '-Python-%(pyver)s'), -] - -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, -} - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_list = [ - ('jupyter-xprahtml5-proxy', '0.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'db852682e8e366091e6a3984b60ac3d2e6b3197be2ef074440c11cb09e23b80b')]), - ('source_urls', ['https://github.com/FZJ-JSC/jupyter-xprahtml5-proxy/archive/']), - ('source_tmpl', 'v0.3.0_devel.tar.gz'), - ('patches', ['jupyter_xprahtml5_proxy-launch_xpra.patch']), - ('modulename', 'jupyter_xprahtml5_proxy'), - ])), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterProxy-XpraHTML5/JupyterProxy-XpraHTML5-0.3.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb b/Golden_Repo/j/JupyterProxy-XpraHTML5/JupyterProxy-XpraHTML5-0.3.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb deleted file mode 100644 index 2d40891bc7b2512e65566c31137fcdc8ef8b83d9..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterProxy-XpraHTML5/JupyterProxy-XpraHTML5-0.3.0-gcccoremkl-9.3.0-2020.2.254-2020.2.6.eb +++ /dev/null @@ -1,86 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'JupyterProxy-XpraHTML5' -version = '0.3.0' - -local_jupyterver = '2020.2.6' -versionsuffix = '-' + local_jupyterver - -homepage = 'https://xpra.org' -description = """ -Jupyter proxy for Xpra HTML5 sessions. -Xpra is an open-source multi-platform persistent remote display server and client -for forwarding applications and desktop screens. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('xpra', '4.0.4', '-Python-%(pyver)s'), # check for existance - ('jsc-xdg-menu', '2020.4'), # check for existance -] - -dependencies = [ - ('Python', '3.8.5'), - ('Jupyter', local_jupyterver, '-Python-%(pyver)s'), -] - -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'download_dep_fail': True, - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, -} - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_list = [ - ('jupyter-xprahtml5-proxy', '0.3.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6212ae766ebcecab31052cc1a42beebcbc65f7c310540e9da042ed027253ecc2')]), - ('source_urls', ['https://github.com/FZJ-JSC/jupyter-xprahtml5-proxy/archive/']), - ('source_tmpl', 'v0.3.0_devel.tar.gz'), - ('modulename', 'jupyter_xprahtml5_proxy'), - ])), -] - -postinstallcmds = [ - # write launch_xpra.sh - ( - '{ cat > %(installdir)s/lib/python%(pyshortver)s/' - 'site-packages/jupyter_xprahtml5_proxy/share/launch_xpra.sh; } << EOF \n' - '#!/bin/bash \n' - '\n' - '# Load required modules \n' - 'module purge \n' - 'module use \$OTHERSTAGES \n' - 'module load Stages/${STAGE} \n' - 'module load GCCcore/.9.3.0 \n' - 'module load xpra/4.0.4-Python-3.8.5 \n' - 'module load jsc-xdg-menu/.2020.4 \n' - '\n' - 'if ! command -v xterm &> /dev/null \n' - 'then \n' - ' echo "xterm not found - trying to load the xterm-module" \n' - ' module load xterm \n' - 'fi \n' - '\n' - 'xpra "\$@" \n' - '\n' - 'EOF' - ), - 'chmod +x %(installdir)s/lib/python%(pyshortver)s/site-packages/jupyter_xprahtml5_proxy/share/launch_xpra.sh' -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/j/JupyterProxy-XpraHTML5/jupyter_xprahtml5_proxy-launch_xpra.patch b/Golden_Repo/j/JupyterProxy-XpraHTML5/jupyter_xprahtml5_proxy-launch_xpra.patch deleted file mode 100644 index 100eb76f25e5ae2400283077f9e3e73d4c485680..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/JupyterProxy-XpraHTML5/jupyter_xprahtml5_proxy-launch_xpra.patch +++ /dev/null @@ -1,27 +0,0 @@ -diff -Naur jupyter-xprahtml5-proxy.orig/jupyter_xprahtml5_proxy/share/launch_xpra.sh jupyter-xprahtml5-proxy/jupyter_xprahtml5_proxy/share/launch_xpra.sh ---- jupyter-xprahtml5-proxy.orig/jupyter_xprahtml5_proxy/share/launch_xpra.sh 2020-11-19 00:13:39.000000000 +0100 -+++ jupyter-xprahtml5-proxy/jupyter_xprahtml5_proxy/share/launch_xpra.sh 2020-11-20 18:49:09.557792000 +0100 -@@ -5,10 +5,17 @@ - # Do it here. - - # example --# module purge > /dev/null --# module use $OTHERSTAGES > /dev/null --# module load Stages/2020 > /dev/null --# module load GCCcore/.9.3.0 > /dev/null --# module load xpra/4.0.4-Python-3.8.5 > /dev/null -+module purge -+module use $OTHERSTAGES -+module load Stages/2020 -+module load GCCcore/.9.3.0 -+module load xpra/4.0.4-Python-3.8.5 -+module load jsc-xdg-menu/.2020.4 -+ -+if ! 
command -v xterm &> /dev/null -+then -+ echo "xterm not found - trying to load the xterm-module" -+ module load xterm -+fi - - xpra "$@" - diff --git a/Golden_Repo/j/jemalloc/jemalloc-5.2.1-GCCcore-10.3.0.eb b/Golden_Repo/j/jemalloc/jemalloc-5.2.1-GCCcore-10.3.0.eb deleted file mode 100644 index 2f451bc747ccb1dee38e042e747ec73847d9c9bc..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/jemalloc/jemalloc-5.2.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'jemalloc' -version = '5.2.1' - -homepage = 'http://jemalloc.net' -description = """jemalloc is a general purpose malloc(3) implementation that emphasizes fragmentation avoidance and - scalable concurrency support.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/jemalloc/jemalloc/archive'] -sources = ['%(version)s.tar.gz'] -checksums = ['ed51b0b37098af4ca6ed31c22324635263f8ad6471889e0592a9c0dba9136aea'] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.36.1'), -] - -# From version 5.2.1 (or maybe earlier) it does no longer build, -# nor try to install, documentation if xsltproc is missing. -# So we can use normal installation. -preconfigopts = "./autogen.sh && " -configopts = "--with-version=%(version)s-0-g0000 " # build with version info - - -sanity_check_paths = { - 'files': ['bin/jeprof', 'lib/libjemalloc.a', 'lib/libjemalloc_pic.a', 'lib/libjemalloc.%s' % SHLIB_EXT, - 'include/jemalloc/jemalloc.h'], - 'dirs': [''], -} - -modextrapaths = {'LD_PRELOAD': ['lib/libjemalloc.%s' % SHLIB_EXT]} - -moduleclass = 'lib' diff --git a/Golden_Repo/j/jemalloc/jemalloc-5.2.1-GCCcore-9.3.0.eb b/Golden_Repo/j/jemalloc/jemalloc-5.2.1-GCCcore-9.3.0.eb deleted file mode 100644 index 3574707466c8bc3a0c81df5886c45309bd18248a..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/jemalloc/jemalloc-5.2.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'jemalloc' -version = '5.2.1' - -homepage = 'http://jemalloc.net' -description = """jemalloc is a general purpose malloc(3) implementation that emphasizes fragmentation avoidance and - scalable concurrency support.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/jemalloc/jemalloc/archive'] -sources = ['%(version)s.tar.gz'] -checksums = ['ed51b0b37098af4ca6ed31c22324635263f8ad6471889e0592a9c0dba9136aea'] - -builddependencies = [ - ('Autotools', '20200321'), - ('binutils', '2.34'), -] - -# From version 5.2.1 (or maybe earlier) it does no longer build, -# nor try to install, documentation if xsltproc is missing. -# So we can use normal installation. 
-preconfigopts = "./autogen.sh && " -configopts = "--with-version=%(version)s-0-g0000 " # build with version info - - -sanity_check_paths = { - 'files': ['bin/jeprof', 'lib/libjemalloc.a', 'lib/libjemalloc_pic.a', 'lib/libjemalloc.%s' % SHLIB_EXT, - 'include/jemalloc/jemalloc.h'], - 'dirs': [''], -} - -modextrapaths = {'LD_PRELOAD': ['lib/libjemalloc.%s' % SHLIB_EXT]} - -moduleclass = 'lib' diff --git a/Golden_Repo/j/jsc-xdg-menu/jsc-xdg-menu-2020.4-GCCcore-10.3.0.eb b/Golden_Repo/j/jsc-xdg-menu/jsc-xdg-menu-2020.4-GCCcore-10.3.0.eb deleted file mode 100644 index 7b8864fcaa2b44c327619e95a60f24ced0f70699..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/jsc-xdg-menu/jsc-xdg-menu-2020.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'Binary' - -name = 'jsc-xdg-menu' -version = '2020.4' - -homepage = '' -description = """setup JSC`s desktop menu""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://gitlab.version.fz-juelich.de/goebbert1/jsc-xdg-menu/-/archive/%(version)s/'] -sources = ['%(name)s-%(version)s.tar.gz'] -checksums = [('sha256', '866afc04be7a8b7068baca0323a0ed88c6bbccf793fd9b3f4f686e6ebc244548')], - -extract_sources = True, -install_cmd = 'cp -a %(builddir)s/%(name)s-%(version)s/* %(installdir)s/' - -modextravars = {'XDG_MENU_PREFIX': 'jsc-'} -modextrapaths = {'PATH': 'bin', - 'XDG_CONFIG_DIRS': 'config', - 'XDG_DATA_DIRS': 'data'} - -sanity_check_paths = { - 'files': ['README.md', 'config/menus/jsc-applications.menu'], - 'dirs': ['bin', 'config/menus', 'data/applications', 'data/desktop-directories'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/j/jsc-xdg-menu/jsc-xdg-menu-2020.4-GCCcore-9.3.0.eb b/Golden_Repo/j/jsc-xdg-menu/jsc-xdg-menu-2020.4-GCCcore-9.3.0.eb deleted file mode 100644 index f5467ffa095fbb1e4a2ab1032a9f9555fc3aa365..0000000000000000000000000000000000000000 --- a/Golden_Repo/j/jsc-xdg-menu/jsc-xdg-menu-2020.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'Binary' - -name = 'jsc-xdg-menu' -version = '2020.4' - -homepage = '' -description = """setup JSC`s desktop menu""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://gitlab.version.fz-juelich.de/goebbert1/jsc-xdg-menu/-/archive/%(version)s/'] -sources = ['%(name)s-%(version)s.tar.gz'] -checksums = [('sha256', '866afc04be7a8b7068baca0323a0ed88c6bbccf793fd9b3f4f686e6ebc244548')], - -extract_sources = True, -install_cmd = 'cp -a %(builddir)s/%(name)s-%(version)s/* %(installdir)s/' - -modextravars = {'XDG_MENU_PREFIX': 'jsc-'} -modextrapaths = {'PATH': 'bin', - 'XDG_CONFIG_DIRS': 'config', - 'XDG_DATA_DIRS': 'data'} - -sanity_check_paths = { - 'files': ['README.md', 'config/menus/jsc-applications.menu'], - 'dirs': ['bin', 'config/menus', 'data/applications', 'data/desktop-directories'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/k/kim-api/kim-api-2.1.3-GCC-10.3.0.eb b/Golden_Repo/k/kim-api/kim-api-2.1.3-GCC-10.3.0.eb deleted file mode 100644 index 4a226d6bb5c33c9a80b88f5ec2504496a64d47f2..0000000000000000000000000000000000000000 --- a/Golden_Repo/k/kim-api/kim-api-2.1.3-GCC-10.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'kim-api' -version = '2.1.3' - -homepage = 'https://openkim.org/' -description = """Open Knowledgebase of Interatomic Models. - -KIM is an API and OpenKIM is a collection of interatomic models (potentials) for -atomistic simulations. 
This is a library that can be used by simulation programs -to get access to the models in the OpenKIM database. - -This EasyBuild only installs the API, the models can be installed with the -package openkim-models, or the user can install them manually by running - kim-api-collections-management install user MODELNAME -or - kim-api-collections-management install user OpenKIM -to install them all. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} - -source_urls = ['https://s3.openkim.org/%(name)s/'] -sources = [SOURCE_TXZ] -checksums = ['88a5416006c65a2940d82fad49de0885aead05bfa8b59f87d287db5516b9c467'] - - -dependencies = [ - # Also needed to install models, thus not just a builddependenc - ('CMake', '3.18.0', '', SYSTEM), -] - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/%(name)s-collections-management', 'lib64/libkim-api.so'], - 'dirs': [], -} - -modextravars = {'KIM_API_CMAKE_PREFIX_DIR': '%(installdir)s/lib64'} - -moduleclass = 'chem' diff --git a/Golden_Repo/k/kim-api/kim-api-2.1.3-GCC-9.3.0.eb b/Golden_Repo/k/kim-api/kim-api-2.1.3-GCC-9.3.0.eb deleted file mode 100644 index 817f317218703444e26a07973179b660765b7f0b..0000000000000000000000000000000000000000 --- a/Golden_Repo/k/kim-api/kim-api-2.1.3-GCC-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'kim-api' -version = '2.1.3' - -homepage = 'https://openkim.org/' -description = """Open Knowledgebase of Interatomic Models. - -KIM is an API and OpenKIM is a collection of interatomic models (potentials) for -atomistic simulations. This is a library that can be used by simulation programs -to get access to the models in the OpenKIM database. - -This EasyBuild only installs the API, the models can be installed with the -package openkim-models, or the user can install them manually by running - kim-api-collections-management install user MODELNAME -or - kim-api-collections-management install user OpenKIM -to install them all. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['https://s3.openkim.org/%(name)s/'] -sources = [SOURCE_TXZ] -checksums = ['88a5416006c65a2940d82fad49de0885aead05bfa8b59f87d287db5516b9c467'] - -dependencies = [ - ('CMake', '3.18.0'), # Also needed to install models, thus not just a builddependency. -] - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/%(name)s-collections-management', 'lib64/libkim-api.so'], - 'dirs': [], -} - -modextravars = {'KIM_API_CMAKE_PREFIX_DIR': '%(installdir)s/lib64'} - -moduleclass = 'chem' diff --git a/Golden_Repo/k/kim-api/kim-api-2.1.3-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/k/kim-api/kim-api-2.1.3-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 98c59e1952cfe68cb9ee9c0ca6e5b2a9f7901d6b..0000000000000000000000000000000000000000 --- a/Golden_Repo/k/kim-api/kim-api-2.1.3-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'kim-api' -version = '2.1.3' - -homepage = 'https://openkim.org/' -description = """Open Knowledgebase of Interatomic Models. - -KIM is an API and OpenKIM is a collection of interatomic models (potentials) for -atomistic simulations. This is a library that can be used by simulation programs -to get access to the models in the OpenKIM database. 
- -This EasyBuild only installs the API, the models can be installed with the -package openkim-models, or the user can install them manually by running - kim-api-collections-management install user MODELNAME -or - kim-api-collections-management install user OpenKIM -to install them all. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = ['https://s3.openkim.org/%(name)s/'] -sources = [SOURCE_TXZ] -checksums = ['88a5416006c65a2940d82fad49de0885aead05bfa8b59f87d287db5516b9c467'] - -dependencies = [ - ('CMake', '3.18.0'), # Also needed to install models, thus not just a builddependency. -] - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/%(name)s-collections-management', 'lib64/libkim-api.so'], - 'dirs': [], -} - -modextravars = {'KIM_API_CMAKE_PREFIX_DIR': '%(installdir)s/lib64'} - -moduleclass = 'chem' diff --git a/Golden_Repo/k/kim-api/kim-api-2.1.3-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/k/kim-api/kim-api-2.1.3-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index cc62183dbbf2d0dc6c2c9affa6bbbc6c9a3a2c2a..0000000000000000000000000000000000000000 --- a/Golden_Repo/k/kim-api/kim-api-2.1.3-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'kim-api' -version = '2.1.3' - -homepage = 'https://openkim.org/' -description = """Open Knowledgebase of Interatomic Models. - -KIM is an API and OpenKIM is a collection of interatomic models (potentials) for -atomistic simulations. This is a library that can be used by simulation programs -to get access to the models in the OpenKIM database. - -This EasyBuild only installs the API, the models can be installed with the -package openkim-models, or the user can install them manually by running - kim-api-collections-management install user MODELNAME -or - kim-api-collections-management install user OpenKIM -to install them all. - """ - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -source_urls = ['https://s3.openkim.org/%(name)s/'] -sources = [SOURCE_TXZ] -checksums = ['88a5416006c65a2940d82fad49de0885aead05bfa8b59f87d287db5516b9c467'] - -dependencies = [ - # Also needed to install models, thus not just a builddependenc - ('CMake', '3.18.0', '', SYSTEM), -] - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/%(name)s-collections-management', 'lib64/libkim-api.so'], - 'dirs': [], -} - -modextravars = {'KIM_API_CMAKE_PREFIX_DIR': '%(installdir)s/lib64'} - -moduleclass = 'chem' diff --git a/Golden_Repo/l/LAME/LAME-3.100-GCCcore-10.3.0.eb b/Golden_Repo/l/LAME/LAME-3.100-GCCcore-10.3.0.eb deleted file mode 100644 index 9c368306c5a929001f21c2ade9e7567f40c1f353..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAME/LAME-3.100-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -# # -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# -# Author: Stephane Thiell <sthiell@stanford.edu> -# ## - -easyblock = 'ConfigureMake' - -name = 'LAME' -version = '3.100' - -homepage = 'http://lame.sourceforge.net/' -description = "LAME is a high quality MPEG Audio Layer III (MP3) encoder licensed under the LGPL." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://sourceforge.net/projects/%(namelower)s/files/%(namelower)s/%(version_major_minor)s/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['ddfe36cab873794038ae2c1210557ad34857a4b6bdc515785d1da9e175b1da1e'] - -builddependencies = [ - ('binutils', '2.36.1'), -] -dependencies = [ - ('ncurses', '6.2'), -] - -# configure is broken: add workaround to find libncurses... -configure_cmd_prefix = "FRONTEND_LDADD='-L${EBROOTNCURSES}/lib' " - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'include/%(namelower)s/%(namelower)s.h', 'lib/libmp3lame.so'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/l/LAME/LAME-3.100-GCCcore-9.3.0.eb b/Golden_Repo/l/LAME/LAME-3.100-GCCcore-9.3.0.eb deleted file mode 100644 index 791f7598c62abedfd552ed21e1a3ba1f15916b9e..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAME/LAME-3.100-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -# # -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# -# Author: Stephane Thiell <sthiell@stanford.edu> -# ## - -easyblock = 'ConfigureMake' - -name = 'LAME' -version = '3.100' - -homepage = 'http://lame.sourceforge.net/' -description = "LAME is a high quality MPEG Audio Layer III (MP3) encoder licensed under the LGPL." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://sourceforge.net/projects/%(namelower)s/files/%(namelower)s/%(version_major_minor)s/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['ddfe36cab873794038ae2c1210557ad34857a4b6bdc515785d1da9e175b1da1e'] - -builddependencies = [ - ('binutils', '2.34'), -] -dependencies = [ - ('ncurses', '6.2'), -] - -# configure is broken: add workaround to find libncurses... -configure_cmd_prefix = "FRONTEND_LDADD='-L${EBROOTNCURSES}/lib' " - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'include/%(namelower)s/%(namelower)s.h', 'lib/libmp3lame.so'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb b/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb deleted file mode 100644 index 9c542791db07c5be762008ee87596fe7d80461cc..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb +++ /dev/null @@ -1,168 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '24Dec2020' -versionsuffix = '-CUDA' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'patch_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -cuda_compute_capabilities = ['7.0'] - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# Use the bfd linker for C++ (this will only be picked up when using Kokkos) -preconfigopts = 'export CXXFLAGS="-fuse-ld=bfd $CXXFLAGS" &&' -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'chem' diff --git a/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020.eb b/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020.eb deleted file mode 100644 index 49797ab28bdc54178c5f1b68512a695fdcb589c5..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020.eb +++ /dev/null @@ -1,158 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '24Dec2020' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'patch_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -moduleclass = 'chem' diff --git a/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-intel-para-2020.eb b/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-intel-para-2020.eb deleted file mode 100644 index 652bd42b756c9e5ad170969b2fe66d098a3e959e..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAMMPS/LAMMPS-24Dec2020-intel-para-2020.eb +++ /dev/null @@ -1,158 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '24Dec2020' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'patch_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -moduleclass = 'chem' diff --git a/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb b/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb deleted file mode 100644 index 3a53f84ceb7dd4b9f45a5bdd24bf46daaac26f6c..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb +++ /dev/null @@ -1,168 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '29Oct2020' -versionsuffix = '-CUDA' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'stable_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -cuda_compute_capabilities = ['7.0'] - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# Use the bfd linker for C++ (this will only be picked up when using Kokkos) -preconfigopts = 'export CXXFLAGS="-fuse-ld=bfd $CXXFLAGS" &&' -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'chem' diff --git a/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020.eb b/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020.eb deleted file mode 100644 index 3ba75ffeab25b7874cf46f29c60f2d8574ea184a..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020.eb +++ /dev/null @@ -1,158 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '29Oct2020' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'stable_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -moduleclass = 'chem' diff --git a/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-intel-para-2020.eb b/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-intel-para-2020.eb deleted file mode 100644 index b1b8feda2f73d33eb14aecbe3b9af3d987529a4e..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LAMMPS/LAMMPS-29Oct2020-intel-para-2020.eb +++ /dev/null @@ -1,158 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '29Oct2020' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'stable_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options
-
-# docs require virtualenv (which we don't have)
-configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off '
-
-# auto-enabled by easyblock
-# 'GPU' - if cuda package is present and kokkos is disabled
-# 'KOKKOS' - if kokkos is enabled (by default)
-#
-# not enabled (yet), needs more work/additional dependencies:
-# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package
-# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package
-general_packages = [
-    'ASPHERE',
-    'BODY',
-    'CLASS2',
-    'COLLOID',
-    'COMPRESS',
-    'CORESHELL',
-    'DIPOLE',
-    'GRANULAR',
-    'KIM',
-    'KSPACE',
-    'MANYBODY',
-    'MC',
-    'MESSAGE',
-    'MISC',
-    'MLIAP',
-    'MOLECULE',
-    'MPIIO',
-    'PERI',
-    'POEMS',
-    'PYTHON',
-    'QEQ',
-    'REPLICA',
-    'RIGID',
-    'SHOCK',
-    'SNAP',
-    'SPIN',
-    'SRD',
-    'VORONOI',
-]
-
-# run short test case to make sure installation doesn't produce blatantly incorrect results;
-# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b
-# (requires an MPI context for intel/2020a)
-# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py']
-
-moduleclass = 'chem'
diff --git a/Golden_Repo/l/LLVM/LLVM-10.0.1-GCCcore-10.3.0.eb b/Golden_Repo/l/LLVM/LLVM-10.0.1-GCCcore-10.3.0.eb
deleted file mode 100644
index e498fb4b2fc59fb282bf65394671acd392acaac4..0000000000000000000000000000000000000000
--- a/Golden_Repo/l/LLVM/LLVM-10.0.1-GCCcore-10.3.0.eb
+++ /dev/null
@@ -1,42 +0,0 @@
-name = 'LLVM'
-version = '10.0.1'
-
-homepage = "https://llvm.org/"
-description = """The LLVM Core libraries provide a modern source- and target-independent
-optimizer, along with code generation support for many popular CPUs
-(as well as some less common ones!) These libraries are built around a well
-specified code representation known as the LLVM intermediate representation
-("LLVM IR"). The LLVM Core libraries are well documented, and it is
-particularly easy to invent your own language (or port an existing compiler)
-to use LLVM as an optimizer and code generator."""
-
-toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
-toolchainopts = {'cstd': 'gnu++11'}
-
-source_urls = ['https://github.com/llvm/llvm-project/releases/download/llvmorg-%(version)s/']
-sources = ['llvm-%(version)s.src.tar.xz']
-
-patches = [
-    'llvm-10.0.1-gcc-9.3.0-drop-visibility-inlines.patch',
-    'intel-D47188-svml-VF.patch'
-]
-
-builddependencies = [
-    ('binutils', '2.36.1'),
-    ('CMake', '3.18.0', '', SYSTEM),
-    ('Python', '3.8.5'),
-]
-
-dependencies = [
-    ('ncurses', '6.2'),
-    ('zlib', '1.2.11'),
-]
-
-build_shared_libs = True
-
-sanity_check_paths = {
-    'files': ['bin/llvm-ar', 'bin/FileCheck'],
-    'dirs': ['include/llvm', 'include/llvm-c'],
-}
-
-moduleclass = 'compiler'
diff --git a/Golden_Repo/l/LLVM/LLVM-10.0.1-GCCcore-9.3.0.eb b/Golden_Repo/l/LLVM/LLVM-10.0.1-GCCcore-9.3.0.eb
deleted file mode 100644
index b71c595bf543d52f1ef16aaa7a5e58614184b3a4..0000000000000000000000000000000000000000
--- a/Golden_Repo/l/LLVM/LLVM-10.0.1-GCCcore-9.3.0.eb
+++ /dev/null
@@ -1,42 +0,0 @@
-name = 'LLVM'
-version = '10.0.1'
-
-homepage = "https://llvm.org/"
-description = """The LLVM Core libraries provide a modern source- and target-independent
-optimizer, along with code generation support for many popular CPUs
-(as well as some less common ones!) 
These libraries are built around a well -specified code representation known as the LLVM intermediate representation -("LLVM IR"). The LLVM Core libraries are well documented, and it is -particularly easy to invent your own language (or port an existing compiler) -to use LLVM as an optimizer and code generator.""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'cstd': 'gnu++11'} - -source_urls = ['https://github.com/llvm/llvm-project/releases/download/llvmorg-%(version)s/'] -sources = ['llvm-%(version)s.src.tar.xz'] - -patches = [ - 'llvm-10.0.1-gcc-9.3.0-drop-visibility-inlines.patch', - 'intel-D47188-svml-VF.patch' -] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Python', '3.8.5'), -] - -dependencies = [ - ('ncurses', '6.2'), - ('zlib', '1.2.11'), -] - -build_shared_libs = True - -sanity_check_paths = { - 'files': ['bin/llvm-ar', 'bin/FileCheck'], - 'dirs': ['include/llvm', 'include/llvm-c'], -} - -moduleclass = 'compiler' diff --git a/Golden_Repo/l/LLVM/intel-D47188-svml-VF.patch b/Golden_Repo/l/LLVM/intel-D47188-svml-VF.patch deleted file mode 100644 index d4d29fc1b21a79656b80dddbe5b61c7ec1309d27..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LLVM/intel-D47188-svml-VF.patch +++ /dev/null @@ -1,1891 +0,0 @@ -From 83294690d80a899e08cc455efb443bb957303947 Mon Sep 17 00:00:00 2001 -From: Tim Snyder <snyder.tim@gmail.com> -Date: Mon, 30 Mar 2020 15:37:33 -0500 -Subject: [PATCH] Fixes vectorizer and extends SVML support - -Updates patch from LLVM 9.0.0 to apply on 10.0.0. Original patch -merged several fixes: - -1. https://reviews.llvm.org/D47188 patch fixes the problem with improper calls -to SVML library as it has non-standard calling conventions. So accordingly it -has SVML calling conventions definitions and code to set CC to the vectorized -calls. As SVML provides several implementations for the math functions we also -took into consideration fast attribute and select more fast implementation in -such case. This work is based on original Matt Masten's work. -Author: Denis Nagorny - -2. https://reviews.llvm.org/D53035 patch implements support to legalize SVML -calls by breaking down the illegal vector call instruction into multiple legal -vector call instructions during code generation. Currently the vectorizer does -not check legality of the generated SVML (or any VECLIB) call instructions, and -this can lead to potential problems even during vector type legalization. This -patch addresses this issue by adding a legality check during code generation and -replaces the illegal SVML call with corresponding legalized instructions. -(RFC: http://lists.llvm.org/pipermail/llvm-dev/2018-June/124357.html) -Author: Karthik Senthil - -3. 
Functional merge of the patches above, which fixes calling convention ---- - include/llvm/Analysis/TargetLibraryInfo.h | 17 +- - include/llvm/IR/CMakeLists.txt | 4 + - include/llvm/IR/CallingConv.h | 3 + - include/llvm/IR/SVML.td | 62 +++ - lib/Analysis/CMakeLists.txt | 1 + - lib/Analysis/TargetLibraryInfo.cpp | 25 +- - lib/AsmParser/LLLexer.cpp | 1 + - lib/AsmParser/LLParser.cpp | 2 + - lib/AsmParser/LLToken.h | 1 + - lib/IR/AsmWriter.cpp | 1 + - lib/IR/Verifier.cpp | 1 + - lib/Target/X86/X86CallingConv.td | 67 ++- - lib/Target/X86/X86ISelLowering.cpp | 3 +- - lib/Target/X86/X86RegisterInfo.cpp | 34 ++ - lib/Target/X86/X86Subtarget.h | 1 + - lib/Transforms/Utils/InjectTLIMappings.cpp | 3 +- - lib/Transforms/Vectorize/LoopVectorize.cpp | 279 +++++++++- - .../LoopVectorize/X86/svml-calls-finite.ll | 9 +- - .../LoopVectorize/X86/svml-calls.ll | 81 ++- - .../LoopVectorize/X86/svml-legal-calls.ll | 513 ++++++++++++++++++ - .../LoopVectorize/X86/svml-legal-codegen.ll | 61 +++ - utils/TableGen/CMakeLists.txt | 1 + - utils/TableGen/SVMLEmitter.cpp | 110 ++++ - utils/TableGen/TableGen.cpp | 8 +- - utils/TableGen/TableGenBackends.h | 1 + - utils/vim/syntax/llvm.vim | 1 + - 26 files changed, 1248 insertions(+), 42 deletions(-) - create mode 100644 include/llvm/IR/SVML.td - create mode 100644 test/Transforms/LoopVectorize/X86/svml-legal-calls.ll - create mode 100644 test/Transforms/LoopVectorize/X86/svml-legal-codegen.ll - create mode 100644 utils/TableGen/SVMLEmitter.cpp - -diff --git a/include/llvm/Analysis/TargetLibraryInfo.h b/include/llvm/Analysis/TargetLibraryInfo.h -index 1bd9db756..e91833b5c 100644 ---- a/include/llvm/Analysis/TargetLibraryInfo.h -+++ b/include/llvm/Analysis/TargetLibraryInfo.h -@@ -39,6 +39,12 @@ struct VecDesc { - NotLibFunc - }; - -+enum SVMLAccuracy { -+ SVML_DEFAULT, -+ SVML_HA, -+ SVML_EP -+}; -+ - /// Implementation of the target library information. - /// - /// This class constructs tables that hold the target library information and -@@ -152,7 +158,8 @@ public: - /// Return true if the function F has a vector equivalent with vectorization - /// factor VF. - bool isFunctionVectorizable(StringRef F, unsigned VF) const { -- return !getVectorizedFunction(F, VF).empty(); -+ bool Ignored; -+ return !getVectorizedFunction(F, VF, Ignored, false).empty(); - } - - /// Return true if the function F has a vector equivalent with any -@@ -161,7 +168,8 @@ public: - - /// Return the name of the equivalent of F, vectorized with factor VF. If no - /// such mapping exists, return the empty string. -- StringRef getVectorizedFunction(StringRef F, unsigned VF) const; -+ std::string getVectorizedFunction(StringRef F, unsigned VF, bool &FromSVML, -+ bool IsFast) const; - - /// Return true if the function F has a scalar equivalent, and set VF to be - /// the vectorization factor. 
-@@ -307,8 +315,9 @@ public: - bool isFunctionVectorizable(StringRef F) const { - return Impl->isFunctionVectorizable(F); - } -- StringRef getVectorizedFunction(StringRef F, unsigned VF) const { -- return Impl->getVectorizedFunction(F, VF); -+ std::string getVectorizedFunction(StringRef F, unsigned VF, bool &FromSVML, -+ bool IsFast) const { -+ return Impl->getVectorizedFunction(F, VF, FromSVML, IsFast); - } - - /// Tests if the function is both available and a candidate for optimized code -diff --git a/include/llvm/IR/CMakeLists.txt b/include/llvm/IR/CMakeLists.txt -index c8edc29bd..e532ce08c 100644 ---- a/include/llvm/IR/CMakeLists.txt -+++ b/include/llvm/IR/CMakeLists.txt -@@ -19,3 +19,7 @@ tablegen(LLVM IntrinsicsWebAssembly.h -gen-intrinsic-enums -intrinsic-prefix=was - tablegen(LLVM IntrinsicsX86.h -gen-intrinsic-enums -intrinsic-prefix=x86) - tablegen(LLVM IntrinsicsXCore.h -gen-intrinsic-enums -intrinsic-prefix=xcore) - add_public_tablegen_target(intrinsics_gen) -+ -+set(LLVM_TARGET_DEFINITIONS SVML.td) -+tablegen(LLVM SVML.inc -gen-svml) -+add_public_tablegen_target(svml_gen) -diff --git a/include/llvm/IR/CallingConv.h b/include/llvm/IR/CallingConv.h -index d0906de3e..5179ec8ee 100644 ---- a/include/llvm/IR/CallingConv.h -+++ b/include/llvm/IR/CallingConv.h -@@ -241,6 +241,9 @@ namespace CallingConv { - /// The remainder matches the regular calling convention. - WASM_EmscriptenInvoke = 99, - -+ /// Intel_SVML - Calling conventions for Intel Short Math Vector Library -+ Intel_SVML = 100, -+ - /// The highest possible calling convention ID. Must be some 2^k - 1. - MaxID = 1023 - }; -diff --git a/include/llvm/IR/SVML.td b/include/llvm/IR/SVML.td -new file mode 100644 -index 000000000..5af710404 ---- /dev/null -+++ b/include/llvm/IR/SVML.td -@@ -0,0 +1,62 @@ -+//===-- Intel_SVML.td - Defines SVML call variants ---------*- tablegen -*-===// -+// -+// The LLVM Compiler Infrastructure -+// -+// This file is distributed under the University of Illinois Open Source -+// License. See LICENSE.TXT for details. -+// -+//===----------------------------------------------------------------------===// -+// -+// This file is used by TableGen to define the different typs of SVML function -+// variants used with -fveclib=SVML. -+// -+//===----------------------------------------------------------------------===// -+ -+class SvmlVariant; -+ -+def sin : SvmlVariant; -+def cos : SvmlVariant; -+def pow : SvmlVariant; -+def exp : SvmlVariant; -+def log : SvmlVariant; -+def acos : SvmlVariant; -+def acosh : SvmlVariant; -+def asin : SvmlVariant; -+def asinh : SvmlVariant; -+def atan2 : SvmlVariant; -+def atan : SvmlVariant; -+def atanh : SvmlVariant; -+def cbrt : SvmlVariant; -+def cdfnorm : SvmlVariant; -+def cdfnorminv : SvmlVariant; -+def cosd : SvmlVariant; -+def cosh : SvmlVariant; -+def erf : SvmlVariant; -+def erfc : SvmlVariant; -+def erfcinv : SvmlVariant; -+def erfinv : SvmlVariant; -+def exp10 : SvmlVariant; -+def exp2 : SvmlVariant; -+def expm1 : SvmlVariant; -+def hypot : SvmlVariant; -+def invsqrt : SvmlVariant; -+def log10 : SvmlVariant; -+def log1p : SvmlVariant; -+def log2 : SvmlVariant; -+def sind : SvmlVariant; -+def sinh : SvmlVariant; -+def sqrt : SvmlVariant; -+def tan : SvmlVariant; -+def tanh : SvmlVariant; -+ -+// TODO: SVML does not currently provide _ha and _ep variants of these fucnctions. -+// We should call the default variant of these functions in all cases instead. 
-+ -+// def nearbyint : SvmlVariant; -+// def logb : SvmlVariant; -+// def floor : SvmlVariant; -+// def fmod : SvmlVariant; -+// def ceil : SvmlVariant; -+// def trunc : SvmlVariant; -+// def rint : SvmlVariant; -+// def round : SvmlVariant; -diff --git a/lib/Analysis/CMakeLists.txt b/lib/Analysis/CMakeLists.txt -index cc9ff0bc1..e0da04d04 100644 ---- a/lib/Analysis/CMakeLists.txt -+++ b/lib/Analysis/CMakeLists.txt -@@ -104,4 +104,5 @@ add_llvm_component_library(LLVMAnalysis - - DEPENDS - intrinsics_gen -+ svml_gen - ) -diff --git a/lib/Analysis/TargetLibraryInfo.cpp b/lib/Analysis/TargetLibraryInfo.cpp -index 1a32adf47..bfa6476a8 100644 ---- a/lib/Analysis/TargetLibraryInfo.cpp -+++ b/lib/Analysis/TargetLibraryInfo.cpp -@@ -63,6 +63,11 @@ static bool hasBcmp(const Triple &TT) { - return TT.isOSFreeBSD() || TT.isOSSolaris(); - } - -+std::string svmlMangle(StringRef FnName, const bool IsFast) { -+ std::string FullName = FnName; -+ return IsFast ? FullName : FullName + "_ha"; -+} -+ - /// Initialize the set of available library functions based on the specified - /// target triple. This should be carefully written so that a missing target - /// triple gets a sane set of defaults. -@@ -1539,8 +1544,9 @@ void TargetLibraryInfoImpl::addVectorizableFunctionsFromVecLib( - } - case SVML: { - const VecDesc VecFuncs[] = { -- #define TLI_DEFINE_SVML_VECFUNCS -- #include "llvm/Analysis/VecFuncs.def" -+ #define GET_SVML_VARIANTS -+ #include "llvm/IR/SVML.inc" -+ #undef GET_SVML_VARIANTS - }; - addVectorizableFunctions(VecFuncs); - break; -@@ -1560,19 +1566,26 @@ bool TargetLibraryInfoImpl::isFunctionVectorizable(StringRef funcName) const { - return I != VectorDescs.end() && StringRef(I->ScalarFnName) == funcName; - } - --StringRef TargetLibraryInfoImpl::getVectorizedFunction(StringRef F, -- unsigned VF) const { -+std::string TargetLibraryInfoImpl::getVectorizedFunction(StringRef F, -+ unsigned VF, -+ bool &FromSVML, -+ bool IsFast) const { -+ FromSVML = ClVectorLibrary == SVML; - F = sanitizeFunctionName(F); - if (F.empty()) - return F; - std::vector<VecDesc>::const_iterator I = - llvm::lower_bound(VectorDescs, F, compareWithScalarFnName); - while (I != VectorDescs.end() && StringRef(I->ScalarFnName) == F) { -- if (I->VectorizationFactor == VF) -+ if (I->VectorizationFactor == VF) { -+ if (FromSVML) { -+ return svmlMangle(I->VectorFnName, IsFast); -+ } - return I->VectorFnName; -+ } - ++I; - } -- return StringRef(); -+ return std::string(); - } - - StringRef TargetLibraryInfoImpl::getScalarizedFunction(StringRef F, -diff --git a/lib/AsmParser/LLLexer.cpp b/lib/AsmParser/LLLexer.cpp -index d96b5e0bf..d30bf9631 100644 ---- a/lib/AsmParser/LLLexer.cpp -+++ b/lib/AsmParser/LLLexer.cpp -@@ -603,6 +603,7 @@ lltok::Kind LLLexer::LexIdentifier() { - KEYWORD(spir_kernel); - KEYWORD(spir_func); - KEYWORD(intel_ocl_bicc); -+ KEYWORD(intel_svmlcc); - KEYWORD(x86_64_sysvcc); - KEYWORD(win64cc); - KEYWORD(x86_regcallcc); -diff --git a/lib/AsmParser/LLParser.cpp b/lib/AsmParser/LLParser.cpp -index 1a17f633a..29c042824 100644 ---- a/lib/AsmParser/LLParser.cpp -+++ b/lib/AsmParser/LLParser.cpp -@@ -1921,6 +1921,7 @@ void LLParser::ParseOptionalDLLStorageClass(unsigned &Res) { - /// ::= 'ccc' - /// ::= 'fastcc' - /// ::= 'intel_ocl_bicc' -+/// ::= 'intel_svmlcc' - /// ::= 'coldcc' - /// ::= 'cfguard_checkcc' - /// ::= 'x86_stdcallcc' -@@ -1989,6 +1990,7 @@ bool LLParser::ParseOptionalCallingConv(unsigned &CC) { - case lltok::kw_spir_kernel: CC = CallingConv::SPIR_KERNEL; break; - case lltok::kw_spir_func: CC = 
CallingConv::SPIR_FUNC; break; - case lltok::kw_intel_ocl_bicc: CC = CallingConv::Intel_OCL_BI; break; -+ case lltok::kw_intel_svmlcc: CC = CallingConv::Intel_SVML; break; - case lltok::kw_x86_64_sysvcc: CC = CallingConv::X86_64_SysV; break; - case lltok::kw_win64cc: CC = CallingConv::Win64; break; - case lltok::kw_webkit_jscc: CC = CallingConv::WebKit_JS; break; -diff --git a/lib/AsmParser/LLToken.h b/lib/AsmParser/LLToken.h -index e430e0f6f..fdf86efd6 100644 ---- a/lib/AsmParser/LLToken.h -+++ b/lib/AsmParser/LLToken.h -@@ -132,6 +132,7 @@ enum Kind { - kw_fastcc, - kw_coldcc, - kw_intel_ocl_bicc, -+ kw_intel_svmlcc, - kw_cfguard_checkcc, - kw_x86_stdcallcc, - kw_x86_fastcallcc, -diff --git a/lib/IR/AsmWriter.cpp b/lib/IR/AsmWriter.cpp -index 1f978d136..970aa81d4 100644 ---- a/lib/IR/AsmWriter.cpp -+++ b/lib/IR/AsmWriter.cpp -@@ -360,6 +360,7 @@ static void PrintCallingConv(unsigned cc, raw_ostream &Out) { - case CallingConv::X86_RegCall: Out << "x86_regcallcc"; break; - case CallingConv::X86_VectorCall:Out << "x86_vectorcallcc"; break; - case CallingConv::Intel_OCL_BI: Out << "intel_ocl_bicc"; break; -+ case CallingConv::Intel_SVML: Out << "intel_svmlcc"; break; - case CallingConv::ARM_APCS: Out << "arm_apcscc"; break; - case CallingConv::ARM_AAPCS: Out << "arm_aapcscc"; break; - case CallingConv::ARM_AAPCS_VFP: Out << "arm_aapcs_vfpcc"; break; -diff --git a/lib/IR/Verifier.cpp b/lib/IR/Verifier.cpp -index 61707cc83..fbdd6e1ce 100644 ---- a/lib/IR/Verifier.cpp -+++ b/lib/IR/Verifier.cpp -@@ -2222,6 +2222,7 @@ void Verifier::visitFunction(const Function &F) { - case CallingConv::Fast: - case CallingConv::Cold: - case CallingConv::Intel_OCL_BI: -+ case CallingConv::Intel_SVML: - case CallingConv::PTX_Kernel: - case CallingConv::PTX_Device: - Assert(!F.isVarArg(), "Calling convention does not support varargs or " -diff --git a/lib/Target/X86/X86CallingConv.td b/lib/Target/X86/X86CallingConv.td -index db1aef2fd..9fc4c1774 100644 ---- a/lib/Target/X86/X86CallingConv.td -+++ b/lib/Target/X86/X86CallingConv.td -@@ -482,6 +482,21 @@ def RetCC_X86_64 : CallingConv<[ - CCDelegateTo<RetCC_X86_64_C> - ]>; - -+// Intel_SVML return-value convention. -+def RetCC_Intel_SVML : CallingConv<[ -+ // Vector types are returned in XMM0,XMM1 -+ CCIfType<[v4f32, v2f64], -+ CCAssignToReg<[XMM0,XMM1]>>, -+ -+ // 256-bit FP vectors -+ CCIfType<[v8f32, v4f64], -+ CCAssignToReg<[YMM0,YMM1]>>, -+ -+ // 512-bit FP vectors -+ CCIfType<[v16f32, v8f64], -+ CCAssignToReg<[ZMM0,ZMM1]>> -+]>; -+ - // This is the return-value convention used for the entire X86 backend. - let Entry = 1 in - def RetCC_X86 : CallingConv<[ -@@ -489,6 +504,8 @@ def RetCC_X86 : CallingConv<[ - // Check if this is the Intel OpenCL built-ins calling convention - CCIfCC<"CallingConv::Intel_OCL_BI", CCDelegateTo<RetCC_Intel_OCL_BI>>, - -+ CCIfCC<"CallingConv::Intel_SVML", CCDelegateTo<RetCC_Intel_SVML>>, -+ - CCIfSubtarget<"is64Bit()", CCDelegateTo<RetCC_X86_64>>, - CCDelegateTo<RetCC_X86_32> - ]>; -@@ -1005,6 +1022,30 @@ def CC_Intel_OCL_BI : CallingConv<[ - CCDelegateTo<CC_X86_32_C> - ]>; - -+// X86-64 Intel Short Vector Math Library calling convention. -+def CC_Intel_SVML : CallingConv<[ -+ -+ // The SSE vector arguments are passed in XMM registers. -+ CCIfType<[v4f32, v2f64], -+ CCAssignToReg<[XMM0, XMM1, XMM2]>>, -+ -+ // The 256-bit vector arguments are passed in YMM registers. -+ CCIfType<[v8f32, v4f64], -+ CCAssignToReg<[YMM0, YMM1, YMM2]>>, -+ -+ // The 512-bit vector arguments are passed in ZMM registers. 
-+ CCIfType<[v16f32, v8f64], -+ CCAssignToReg<[ZMM0, ZMM1, ZMM2]>> -+]>; -+ -+def CC_X86_32_Intr : CallingConv<[ -+ CCAssignToStack<4, 4> -+]>; -+ -+def CC_X86_64_Intr : CallingConv<[ -+ CCAssignToStack<8, 8> -+]>; -+ - //===----------------------------------------------------------------------===// - // X86 Root Argument Calling Conventions - //===----------------------------------------------------------------------===// -@@ -1056,6 +1097,7 @@ def CC_X86_64 : CallingConv<[ - let Entry = 1 in - def CC_X86 : CallingConv<[ - CCIfCC<"CallingConv::Intel_OCL_BI", CCDelegateTo<CC_Intel_OCL_BI>>, -+ CCIfCC<"CallingConv::Intel_SVML", CCDelegateTo<CC_Intel_SVML>>, - CCIfSubtarget<"is64Bit()", CCDelegateTo<CC_X86_64>>, - CCDelegateTo<CC_X86_32> - ]>; -@@ -1166,4 +1208,27 @@ def CSR_SysV64_RegCall_NoSSE : CalleeSavedRegs<(add RBX, RBP, RSP, - (sequence "R%u", 12, 15))>; - def CSR_SysV64_RegCall : CalleeSavedRegs<(add CSR_SysV64_RegCall_NoSSE, - (sequence "XMM%u", 8, 15))>; -- -+ -+// SVML calling convention -+def CSR_32_Intel_SVML : CalleeSavedRegs<(add CSR_32_RegCall_NoSSE)>; -+def CSR_32_Intel_SVML_AVX512 : CalleeSavedRegs<(add CSR_32_Intel_SVML, -+ K4, K5, K6, K7)>; -+ -+def CSR_64_Intel_SVML_NoSSE : CalleeSavedRegs<(add RBX, RSI, RDI, RBP, RSP, R12, R13, R14, R15)>; -+ -+def CSR_64_Intel_SVML : CalleeSavedRegs<(add CSR_64_Intel_SVML_NoSSE, -+ (sequence "XMM%u", 8, 15))>; -+def CSR_Win64_Intel_SVML : CalleeSavedRegs<(add CSR_64_Intel_SVML_NoSSE, -+ (sequence "XMM%u", 6, 15))>; -+ -+def CSR_64_Intel_SVML_AVX : CalleeSavedRegs<(add CSR_64_Intel_SVML_NoSSE, -+ (sequence "YMM%u", 8, 15))>; -+def CSR_Win64_Intel_SVML_AVX : CalleeSavedRegs<(add CSR_64_Intel_SVML_NoSSE, -+ (sequence "YMM%u", 6, 15))>; -+ -+def CSR_64_Intel_SVML_AVX512 : CalleeSavedRegs<(add CSR_64_Intel_SVML_NoSSE, -+ (sequence "ZMM%u", 16, 31), -+ K4, K5, K6, K7)>; -+def CSR_Win64_Intel_SVML_AVX512 : CalleeSavedRegs<(add CSR_64_Intel_SVML_NoSSE, -+ (sequence "ZMM%u", 6, 21), -+ K4, K5, K6, K7)>; -diff --git a/lib/Target/X86/X86ISelLowering.cpp b/lib/Target/X86/X86ISelLowering.cpp -index cbdd7135d..c9a73af18 100644 ---- a/lib/Target/X86/X86ISelLowering.cpp -+++ b/lib/Target/X86/X86ISelLowering.cpp -@@ -3623,7 +3623,8 @@ SDValue X86TargetLowering::LowerFormalArguments( - // FIXME: Only some x86_32 calling conventions support AVX512. - if (Subtarget.useAVX512Regs() && - (Is64Bit || (CallConv == CallingConv::X86_VectorCall || -- CallConv == CallingConv::Intel_OCL_BI))) -+ CallConv == CallingConv::Intel_OCL_BI || -+ CallConv == CallingConv::Intel_SVML))) - VecVT = MVT::v16f32; - else if (Subtarget.hasAVX()) - VecVT = MVT::v8f32; -diff --git a/lib/Target/X86/X86RegisterInfo.cpp b/lib/Target/X86/X86RegisterInfo.cpp -index f69626b26..cc381ad67 100644 ---- a/lib/Target/X86/X86RegisterInfo.cpp -+++ b/lib/Target/X86/X86RegisterInfo.cpp -@@ -326,6 +326,23 @@ X86RegisterInfo::getCalleeSavedRegs(const MachineFunction *MF) const { - return CSR_64_Intel_OCL_BI_SaveList; - break; - } -+ case CallingConv::Intel_SVML: { -+ if (Is64Bit) { -+ if (HasAVX512) -+ return IsWin64 ? CSR_Win64_Intel_SVML_AVX512_SaveList : -+ CSR_64_Intel_SVML_AVX512_SaveList; -+ if (HasAVX) -+ return IsWin64 ? CSR_Win64_Intel_SVML_AVX_SaveList : -+ CSR_64_Intel_SVML_AVX_SaveList; -+ -+ return IsWin64 ? 
CSR_Win64_Intel_SVML_SaveList : -+ CSR_64_Intel_SVML_SaveList; -+ } else { // Is32Bit -+ if (HasAVX512) -+ return CSR_32_Intel_SVML_AVX512_SaveList; -+ return CSR_32_Intel_SVML_SaveList; -+ } -+ } - case CallingConv::HHVM: - return CSR_64_HHVM_SaveList; - case CallingConv::X86_RegCall: -@@ -444,6 +461,23 @@ X86RegisterInfo::getCallPreservedMask(const MachineFunction &MF, - return CSR_64_Intel_OCL_BI_RegMask; - break; - } -+ case CallingConv::Intel_SVML: { -+ if (Is64Bit) { -+ if (HasAVX512) -+ return IsWin64 ? CSR_Win64_Intel_SVML_AVX512_RegMask : -+ CSR_64_Intel_SVML_AVX512_RegMask; -+ if (HasAVX) -+ return IsWin64 ? CSR_Win64_Intel_SVML_AVX_RegMask : -+ CSR_64_Intel_SVML_AVX_RegMask; -+ -+ return IsWin64 ? CSR_Win64_Intel_SVML_RegMask : -+ CSR_64_Intel_SVML_RegMask; -+ } else { // Is32Bit -+ if (HasAVX512) -+ return CSR_32_Intel_SVML_AVX512_RegMask; -+ return CSR_32_Intel_SVML_RegMask; -+ } -+ } - case CallingConv::HHVM: - return CSR_64_HHVM_RegMask; - case CallingConv::X86_RegCall: -diff --git a/lib/Target/X86/X86Subtarget.h b/lib/Target/X86/X86Subtarget.h -index f4e8d3032..d872d3696 100644 ---- a/lib/Target/X86/X86Subtarget.h -+++ b/lib/Target/X86/X86Subtarget.h -@@ -820,6 +820,7 @@ public: - case CallingConv::X86_ThisCall: - case CallingConv::X86_VectorCall: - case CallingConv::Intel_OCL_BI: -+ case CallingConv::Intel_SVML: - return isTargetWin64(); - // This convention allows using the Win64 convention on other targets. - case CallingConv::Win64: -diff --git a/lib/Transforms/Utils/InjectTLIMappings.cpp b/lib/Transforms/Utils/InjectTLIMappings.cpp -index 9192e74b9..8c11e6f05 100644 ---- a/lib/Transforms/Utils/InjectTLIMappings.cpp -+++ b/lib/Transforms/Utils/InjectTLIMappings.cpp -@@ -120,7 +120,8 @@ static void addMappingsFromTLI(const TargetLibraryInfo &TLI, CallInst &CI) { - // All VFs in the TLI are powers of 2. - for (unsigned VF = 2, WidestVF = TLI.getWidestVF(ScalarName); VF <= WidestVF; - VF *= 2) { -- const std::string TLIName = TLI.getVectorizedFunction(ScalarName, VF); -+ bool Ignored = false; -+ const std::string TLIName = TLI.getVectorizedFunction(ScalarName, VF, Ignored, CI.getFastMathFlags().isFast()); - if (!TLIName.empty()) { - std::string MangledName = mangleTLIName(TLIName, CI, VF); - if (!OriginalSetOfMappings.count(MangledName)) { -diff --git a/lib/Transforms/Vectorize/LoopVectorize.cpp b/lib/Transforms/Vectorize/LoopVectorize.cpp -index ebfd5fe8b..1beec43ca 100644 ---- a/lib/Transforms/Vectorize/LoopVectorize.cpp -+++ b/lib/Transforms/Vectorize/LoopVectorize.cpp -@@ -667,6 +667,27 @@ protected: - /// vector of instructions. - void addMetadata(ArrayRef<Value *> To, Instruction *From); - -+ /// Check legality of given SVML call instruction \p VecCall generated for -+ /// scalar call \p Call. If illegal then the appropriate legal instruction -+ /// is returned. -+ Value *legalizeSVMLCall(CallInst *VecCall, CallInst *Call); -+ -+ /// Returns the legal VF for a call instruction \p CI using TTI information -+ /// and vector type. -+ unsigned getLegalVFForCall(CallInst *CI); -+ -+ /// Partially vectorize a given call \p Call by breaking it down into multiple -+ /// calls of \p LegalCall, decided by the variant VF \p LegalVF. -+ Value *partialVectorizeCall(CallInst *Call, CallInst *LegalCall, -+ unsigned LegalVF); -+ -+ /// Generate shufflevector instruction for a vector value \p V based on the -+ /// current \p Part and a smaller VF \p LegalVF. 
-+ Value *generateShuffleValue(Value *V, unsigned LegalVF, unsigned Part); -+ -+ /// Combine partially vectorized calls stored in \p CallResults. -+ Value *combinePartialVecCalls(SmallVectorImpl<Value *> &CallResults); -+ - /// The original loop. - Loop *OrigLoop; - -@@ -4370,6 +4391,7 @@ void InnerLoopVectorizer::widenInstruction(Instruction &I) { - } - - Function *VectorF; -+ bool FromSVML = false; - if (UseVectorIntrinsic) { - // Use vector version of the intrinsic. - Type *TysForDecl[] = {CI->getType()}; -@@ -4378,7 +4400,8 @@ void InnerLoopVectorizer::widenInstruction(Instruction &I) { - VectorF = Intrinsic::getDeclaration(M, ID, TysForDecl); - } else { - // Use vector version of the library call. -- StringRef VFnName = TLI->getVectorizedFunction(FnName, VF); -+ bool IsFast = CI->getFastMathFlags().isFast(); -+ std::string VFnName = TLI->getVectorizedFunction(FnName, VF, FromSVML, IsFast); - assert(!VFnName.empty() && "Vector function name is empty."); - VectorF = M->getFunction(VFnName); - if (!VectorF) { -@@ -4397,9 +4420,21 @@ void InnerLoopVectorizer::widenInstruction(Instruction &I) { - - if (isa<FPMathOperator>(V)) - V->copyFastMathFlags(CI); -- -- VectorLoopValueMap.setVectorValue(&I, Part, V); -- addMetadata(V, &I); -+ // Perform legalization of SVML call instruction only if original call -+ // was not Intrinsic -+ if (FromSVML) { -+ assert((V->getCalledFunction()->getName()).startswith("__svml")); -+ LLVM_DEBUG(dbgs() << "LV(SVML): Vector call inst:"; V->dump()); -+ V->setCallingConv(CallingConv::Intel_SVML); -+ auto *LegalV = cast<Instruction>(legalizeSVMLCall(V, CI)); -+ LLVM_DEBUG(dbgs() << "LV: Completed SVML legalization.\n LegalV: "; -+ LegalV->dump()); -+ VectorLoopValueMap.setVectorValue(&I, Part, LegalV); -+ addMetadata(LegalV, &I); -+ } else { -+ VectorLoopValueMap.setVectorValue(&I, Part, V); -+ addMetadata(V, &I); -+ } - } - - break; -@@ -4412,6 +4447,242 @@ void InnerLoopVectorizer::widenInstruction(Instruction &I) { - } // end of switch. - } - -+//===----------------------------------------------------------------------===// -+// Implementation of functions for SVML vector call legalization. -+//===----------------------------------------------------------------------===// -+// -+// Unlike other VECLIBs, SVML needs to be used with target-legal -+// vector types. Otherwise, link failures and/or runtime failures -+// will occur. A motivating example could be - -+// -+// double *a; -+// float *b; -+// #pragma clang loop vectorize_width(8) -+// for(i = 0; i < N; ++i) { -+// a[i] = sin(i); // Legal SVML VF must be 4 or below on AVX -+// b[i] = cosf(i); // VF can be 8 on AVX since 8 floats can fit in YMM -+// } -+// -+// Current implementation of vector code generation in LV is -+// driven based on a single VF (in InnerLoopVectorizer::VF). This -+// inhibits the flexibility of adjusting/choosing different VF -+// for different instructions. -+// -+// Due to this limitation it is much more straightforward to -+// first generate the illegal sin8 (svml_sin8 for SVML vector -+// library) call and then legalize it than trying to avoid -+// generating illegal code from the beginning. -+// -+// A solution for this problem is to check legality of the -+// call instruction right after generating it in vectorizer and -+// if it is illegal we split the call arguments and issue multiple -+// calls to match the legal VF. This is demonstrated currently for -+// the SVML vector library calls (non-intrinsic version only). 
-+// -+// Future directions and extensions: -+// 1) This legalization example shows us that a good direction -+// for the VPlan framework would be to model the vector call -+// instructions in a way that legal VF for each call is chosen -+// correctly within vectorizer and illegal code generation is -+// avoided. -+// 2) This logic can also be extended to general vector functions -+// i.e. legalization OpenMP decalre simd functions. The -+// requirements needed for this will be documented soon. -+ -+Value *InnerLoopVectorizer::legalizeSVMLCall(CallInst *VecCall, -+ CallInst *Call) { -+ unsigned LegalVF = getLegalVFForCall(VecCall); -+ -+ assert(LegalVF > 1 && -+ "Legal VF for SVML call must be greater than 1 to vectorize"); -+ -+ if (LegalVF == VF) -+ return VecCall; -+ else if (LegalVF > VF) -+ // TODO: handle case when we are underfilling vectors -+ return VecCall; -+ -+ // Legal VF for this SVML call is smaller than chosen VF, break it down into -+ // smaller call instructions -+ -+ // Convert args, types and return type to match legal VF -+ SmallVector<Type *, 4> NewTys; -+ SmallVector<Value *, 4> NewArgs; -+ Type *NewRetTy = ToVectorTy(Call->getType(), LegalVF); -+ -+ for (Value *ArgOperand : Call->arg_operands()) { -+ Type *Ty = ToVectorTy(ArgOperand->getType(), LegalVF); -+ NewTys.push_back(Ty); -+ NewArgs.push_back(UndefValue::get(Ty)); -+ } -+ -+ // Construct legal vector function -+ Function *F = Call->getCalledFunction(); -+ StringRef FnName = F->getName(); -+ Module *M = Call->getModule(); -+ bool unused = false; -+ std::string LegalVFnName = TLI->getVectorizedFunction(FnName, LegalVF, unused, Call->getFastMathFlags().isFast()); -+ LLVM_DEBUG(dbgs() << "LV(SVML): LegalVFnName: " << LegalVFnName << " FnName: " << FnName << "\n"); -+ assert(!LegalVFnName.empty() && (LegalVFnName != FnName) && -+ "Could not find legal vector function in TLI."); -+ -+ Function *LegalVectorF = M->getFunction(LegalVFnName); -+ if (!LegalVectorF) { -+ FunctionType *LegalFTy = FunctionType::get(NewRetTy, NewTys, false); -+ LegalVectorF = Function::Create(LegalFTy, Function::ExternalLinkage, LegalVFnName, M); -+ LegalVectorF->copyAttributesFrom(F); -+ } -+ assert(LegalVectorF && "Can't create legal SVML vector function."); -+ -+ LLVM_DEBUG(dbgs() << "LV(SVML): LegalVectorF: "; LegalVectorF->dump()); -+ -+ SmallVector<OperandBundleDef, 1> OpBundles; -+ Call->getOperandBundlesAsDefs(OpBundles); -+ CallInst *LegalV = CallInst::Create(LegalVectorF, NewArgs, OpBundles); -+ -+ if (isa<FPMathOperator>(LegalV)) -+ LegalV->copyFastMathFlags(Call); -+ -+ // Set SVML calling conventions -+ LegalV->setCallingConv(CallingConv::Intel_SVML); -+ -+ LLVM_DEBUG(dbgs() << "LV(SVML): LegalV: "; LegalV->dump()); -+ -+ Value *LegalizedCall = partialVectorizeCall(VecCall, LegalV, LegalVF); -+ -+ LLVM_DEBUG(dbgs() << "LV(SVML): LegalizedCall: "; LegalizedCall->dump()); -+ -+ // Remove the illegal call from Builder -+ VecCall->eraseFromParent(); -+ -+ if (LegalV) -+ delete LegalV; -+ -+ return LegalizedCall; -+} -+ -+unsigned InnerLoopVectorizer::getLegalVFForCall(CallInst *CI) { -+ const DataLayout DL = CI->getModule()->getDataLayout(); -+ FunctionType *CallFT = CI->getFunctionType(); -+ // All functions that need legalization should have a vector return type. -+ // This is true for all SVML functions that are currently supported. 
-+ assert(isa<VectorType>(CallFT->getReturnType()) && -+ "Return type of call that needs legalization is not a vector."); -+ auto *VecCallRetType = cast<VectorType>(CallFT->getReturnType()); -+ Type *ElemType = VecCallRetType->getElementType(); -+ -+ unsigned TypeBitWidth = DL.getTypeSizeInBits(ElemType); -+ unsigned VectorBitWidth = TTI->getRegisterBitWidth(true); -+ unsigned LegalVF = VectorBitWidth / TypeBitWidth; -+ -+ LLVM_DEBUG(dbgs() << "LV(SVML): Type Bit Width: " << TypeBitWidth << "\n"); -+ LLVM_DEBUG(dbgs() << "LV(SVML): Current VL: " << VF << "\n"); -+ LLVM_DEBUG(dbgs() << "LV(SVML): Vector Bit Width: " << VectorBitWidth -+ << "\n"); -+ LLVM_DEBUG(dbgs() << "LV(SVML): Legal Target VL: " << LegalVF << "\n"); -+ -+ return LegalVF; -+} -+ -+// Partial vectorization of a call instruction is achieved by making clones of -+// \p LegalCall and overwriting its argument operands with shufflevector -+// equivalent decided based on \p LegalVF and current Part being filled. -+Value *InnerLoopVectorizer::partialVectorizeCall(CallInst *Call, -+ CallInst *LegalCall, -+ unsigned LegalVF) { -+ unsigned NumParts = VF / LegalVF; -+ LLVM_DEBUG(dbgs() << "LV(SVML): NumParts: " << NumParts << "\n"); -+ SmallVector<Value *, 8> CallResults; -+ -+ for (unsigned Part = 0; Part < NumParts; ++Part) { -+ auto *ClonedCall = cast<CallInst>(LegalCall->clone()); -+ -+ // Update the arg operand of cloned call to shufflevector -+ for (unsigned i = 0, ie = Call->getNumArgOperands(); i != ie; ++i) { -+ auto *NewOp = generateShuffleValue(Call->getArgOperand(i), LegalVF, Part); -+ ClonedCall->setArgOperand(i, NewOp); -+ } -+ -+ LLVM_DEBUG(dbgs() << "LV(SVML): ClonedCall: "; ClonedCall->dump()); -+ -+ auto *PartialVecCall = Builder.Insert(ClonedCall); -+ CallResults.push_back(PartialVecCall); -+ } -+ -+ return combinePartialVecCalls(CallResults); -+} -+ -+Value *InnerLoopVectorizer::generateShuffleValue(Value *V, unsigned LegalVF, -+ unsigned Part) { -+ // Example: -+ // Consider the following vector code - -+ // %1 = sitofp <4 x i32> %0 to <4 x double> -+ // %2 = call <4 x double> @__svml_sin4(<4 x double> %1) -+ // -+ // If the LegalVF is 2, we partially vectorize the sin4 call by invoking -+ // generateShuffleValue on the operand %1 -+ // If Part = 1, output value is - -+ // %shuffle = shufflevector <4 x double> %1, <4 x double> undef, <2 x i32><i32 0, i32 1> -+ // and if Part = 2, output is - -+ // %shuffle7 =shufflevector <4 x double> %1, <4 x double> undef, <2 x i32><i32 2, i32 3> -+ -+ assert(isa<VectorType>(V->getType()) && -+ "Cannot generate shuffles for non-vector values."); -+ SmallVector<unsigned, 4> ShuffleMask; -+ Value *Undef = UndefValue::get(V->getType()); -+ -+ unsigned ElemIdx = Part * LegalVF; -+ -+ for (unsigned K = 0; K < LegalVF; K++) -+ ShuffleMask.push_back(ElemIdx + K); -+ -+ auto *ShuffleInst = -+ Builder.CreateShuffleVector(V, Undef, ShuffleMask, "shuffle"); -+ -+ return ShuffleInst; -+} -+ -+// Results of the calls executed by smaller legal call instructions must be -+// combined to match the original VF for later use. This is done by constructing -+// shufflevector instructions in a cumulative fashion. 
-+Value *InnerLoopVectorizer::combinePartialVecCalls( -+ SmallVectorImpl<Value *> &CallResults) { -+ assert(isa<VectorType>(CallResults[0]->getType()) && -+ "Cannot combine calls with non-vector results."); -+ auto *CallType = cast<VectorType>(CallResults[0]->getType()); -+ -+ Value *CombinedShuffle; -+ unsigned NumElems = CallType->getNumElements() * 2; -+ unsigned NumRegs = CallResults.size(); -+ -+ assert(NumRegs >= 2 && isPowerOf2_32(NumRegs) && -+ "Number of partial vector calls to combine must be a power of 2 " -+ "(atleast 2^1)"); -+ -+ while (NumRegs > 1) { -+ for (unsigned I = 0; I < NumRegs; I += 2) { -+ SmallVector<unsigned, 4> ShuffleMask; -+ for (unsigned J = 0; J < NumElems; J++) -+ ShuffleMask.push_back(J); -+ -+ CombinedShuffle = Builder.CreateShuffleVector( -+ CallResults[I], CallResults[I + 1], ShuffleMask, "combined"); -+ LLVM_DEBUG(dbgs() << "LV(SVML): CombinedShuffle:"; -+ CombinedShuffle->dump()); -+ CallResults.push_back(CombinedShuffle); -+ } -+ -+ SmallVector<Value *, 2>::iterator Start = CallResults.begin(); -+ SmallVector<Value *, 2>::iterator End = Start + NumRegs; -+ CallResults.erase(Start, End); -+ -+ NumElems *= 2; -+ NumRegs /= 2; -+ } -+ -+ return CombinedShuffle; -+} -+ - void LoopVectorizationCostModel::collectLoopScalars(unsigned VF) { - // We should not collect Scalars more than once per VF. Right now, this - // function is called from collectUniformsAndScalars(), which already does -diff --git a/test/Transforms/LoopVectorize/X86/svml-calls-finite.ll b/test/Transforms/LoopVectorize/X86/svml-calls-finite.ll -index 5a4bfe5e6..4da2e48a4 100644 ---- a/test/Transforms/LoopVectorize/X86/svml-calls-finite.ll -+++ b/test/Transforms/LoopVectorize/X86/svml-calls-finite.ll -@@ -39,7 +39,8 @@ for.end: ; preds = %for.body - declare double @__exp_finite(double) #0 - - ; CHECK-LABEL: @exp_f64 --; CHECK: <4 x double> @__svml_exp4 -+; CHECK: <2 x double> @__svml_exp2 -+; CHECK: <2 x double> @__svml_exp2 - ; CHECK: ret - define void @exp_f64(double* nocapture %varray) { - entry: -@@ -99,7 +100,8 @@ for.end: ; preds = %for.body - declare double @__log_finite(double) #0 - - ; CHECK-LABEL: @log_f64 --; CHECK: <4 x double> @__svml_log4 -+; CHECK: <2 x double> @__svml_log2 -+; CHECK: <2 x double> @__svml_log2 - ; CHECK: ret - define void @log_f64(double* nocapture %varray) { - entry: -@@ -159,7 +161,8 @@ for.end: ; preds = %for.body - declare double @__pow_finite(double, double) #0 - - ; CHECK-LABEL: @pow_f64 --; CHECK: <4 x double> @__svml_pow4 -+; CHECK: <2 x double> @__svml_pow2 -+; CHECK: <2 x double> @__svml_pow2 - ; CHECK: ret - define void @pow_f64(double* nocapture %varray, double* nocapture readonly %exp) { - entry: -diff --git a/test/Transforms/LoopVectorize/X86/svml-calls.ll b/test/Transforms/LoopVectorize/X86/svml-calls.ll -index 8ff62f178..4d48d9815 100644 ---- a/test/Transforms/LoopVectorize/X86/svml-calls.ll -+++ b/test/Transforms/LoopVectorize/X86/svml-calls.ll -@@ -31,7 +31,7 @@ declare float @llvm.log.f32(float) #0 - - define void @sin_f64(double* nocapture %varray) { - ; CHECK-LABEL: @sin_f64( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_sin4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_sin4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -54,7 +54,7 @@ for.end: - - define void @sin_f32(float* nocapture %varray) { - ; CHECK-LABEL: @sin_f32( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_sinf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> 
@__svml_sinf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -77,7 +77,7 @@ for.end: - - define void @sin_f64_intrinsic(double* nocapture %varray) { - ; CHECK-LABEL: @sin_f64_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_sin4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_sin4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -100,7 +100,7 @@ for.end: - - define void @sin_f32_intrinsic(float* nocapture %varray) { - ; CHECK-LABEL: @sin_f32_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_sinf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> @__svml_sinf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -123,7 +123,7 @@ for.end: - - define void @cos_f64(double* nocapture %varray) { - ; CHECK-LABEL: @cos_f64( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_cos4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_cos4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -146,7 +146,7 @@ for.end: - - define void @cos_f32(float* nocapture %varray) { - ; CHECK-LABEL: @cos_f32( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_cosf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> @__svml_cosf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -169,7 +169,7 @@ for.end: - - define void @cos_f64_intrinsic(double* nocapture %varray) { - ; CHECK-LABEL: @cos_f64_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_cos4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_cos4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -192,7 +192,7 @@ for.end: - - define void @cos_f32_intrinsic(float* nocapture %varray) { - ; CHECK-LABEL: @cos_f32_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_cosf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> @__svml_cosf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -215,7 +215,7 @@ for.end: - - define void @pow_f64(double* nocapture %varray, double* nocapture readonly %exp) { - ; CHECK-LABEL: @pow_f64( --; CHECK: [[TMP8:%.*]] = call <4 x double> @__svml_pow4(<4 x double> [[TMP4:%.*]], <4 x double> [[WIDE_LOAD:%.*]]) -+; CHECK: [[TMP8:%.*]] = call intel_svmlcc <4 x double> @__svml_pow4_ha(<4 x double> [[TMP4:%.*]], <4 x double> [[WIDE_LOAD:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -240,7 +240,7 @@ for.end: - - define void @pow_f64_intrinsic(double* nocapture %varray, double* nocapture readonly %exp) { - ; CHECK-LABEL: @pow_f64_intrinsic( --; CHECK: [[TMP8:%.*]] = call <4 x double> @__svml_pow4(<4 x double> [[TMP4:%.*]], <4 x double> [[WIDE_LOAD:%.*]]) -+; CHECK: [[TMP8:%.*]] = call intel_svmlcc <4 x double> @__svml_pow4_ha(<4 x double> [[TMP4:%.*]], <4 x double> [[WIDE_LOAD:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -265,7 +265,7 @@ for.end: - - define void @pow_f32(float* nocapture %varray, float* nocapture readonly %exp) { - ; CHECK-LABEL: @pow_f32( --; CHECK: [[TMP8:%.*]] = call <4 x float> @__svml_powf4(<4 x float> [[TMP4:%.*]], <4 x float> [[WIDE_LOAD:%.*]]) -+; CHECK: [[TMP8:%.*]] = call intel_svmlcc <4 x float> @__svml_powf4_ha(<4 x float> [[TMP4:%.*]], <4 x float> [[WIDE_LOAD:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -290,7 +290,7 @@ for.end: - - define void @pow_f32_intrinsic(float* nocapture %varray, float* nocapture readonly %exp) { - ; CHECK-LABEL: 
@pow_f32_intrinsic( --; CHECK: [[TMP8:%.*]] = call <4 x float> @__svml_powf4(<4 x float> [[TMP4:%.*]], <4 x float> [[WIDE_LOAD:%.*]]) -+; CHECK: [[TMP8:%.*]] = call intel_svmlcc <4 x float> @__svml_powf4_ha(<4 x float> [[TMP4:%.*]], <4 x float> [[WIDE_LOAD:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -315,7 +315,7 @@ for.end: - - define void @exp_f64(double* nocapture %varray) { - ; CHECK-LABEL: @exp_f64( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_exp4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_exp4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -338,7 +338,7 @@ for.end: - - define void @exp_f32(float* nocapture %varray) { - ; CHECK-LABEL: @exp_f32( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_expf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> @__svml_expf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -361,7 +361,7 @@ for.end: - - define void @exp_f64_intrinsic(double* nocapture %varray) { - ; CHECK-LABEL: @exp_f64_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_exp4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_exp4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -384,7 +384,7 @@ for.end: - - define void @exp_f32_intrinsic(float* nocapture %varray) { - ; CHECK-LABEL: @exp_f32_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_expf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> @__svml_expf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -407,7 +407,7 @@ for.end: - - define void @log_f64(double* nocapture %varray) { - ; CHECK-LABEL: @log_f64( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_log4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_log4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -430,7 +430,7 @@ for.end: - - define void @log_f32(float* nocapture %varray) { - ; CHECK-LABEL: @log_f32( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_logf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> @__svml_logf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -453,7 +453,7 @@ for.end: - - define void @log_f64_intrinsic(double* nocapture %varray) { - ; CHECK-LABEL: @log_f64_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x double> @__svml_log4(<4 x double> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x double> @__svml_log4_ha(<4 x double> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -476,7 +476,7 @@ for.end: - - define void @log_f32_intrinsic(float* nocapture %varray) { - ; CHECK-LABEL: @log_f32_intrinsic( --; CHECK: [[TMP5:%.*]] = call <4 x float> @__svml_logf4(<4 x float> [[TMP4:%.*]]) -+; CHECK: [[TMP5:%.*]] = call intel_svmlcc <4 x float> @__svml_logf4_ha(<4 x float> [[TMP4:%.*]]) - ; CHECK: ret void - ; - entry: -@@ -497,5 +497,44 @@ for.end: - ret void - } - --attributes #0 = { nounwind readnone } -+; CHECK-LABEL: @atan2_finite -+; CHECK: intel_svmlcc <4 x double> @__svml_atan24 -+; CHECK: intel_svmlcc <4 x double> @__svml_atan24 -+; CHECK: ret -+ -+declare double @__atan2_finite(double, double) local_unnamed_addr #0 - -+define void @atan2_finite([100 x double]* nocapture %varray) local_unnamed_addr #0 { -+entry: -+ br label %for.cond1.preheader -+ -+for.cond1.preheader: ; preds = %for.inc7, %entry -+ %indvars.iv19 = phi i64 [ 0, %entry ], [ %indvars.iv.next20, 
%for.inc7 ] -+ %0 = trunc i64 %indvars.iv19 to i32 -+ %conv = sitofp i32 %0 to double -+ br label %for.body3 -+ -+for.body3: ; preds = %for.body3, %for.cond1.preheader -+ %indvars.iv = phi i64 [ 0, %for.cond1.preheader ], [ %indvars.iv.next, %for.body3 ] -+ %indvars.iv.next = add nuw nsw i64 %indvars.iv, 1 -+ %1 = trunc i64 %indvars.iv.next to i32 -+ %conv4 = sitofp i32 %1 to double -+ %call = tail call fast double @__atan2_finite(double %conv, double %conv4) -+ %arrayidx6 = getelementptr inbounds [100 x double], [100 x double]* %varray, i64 %indvars.iv19, i64 %indvars.iv -+ store double %call, double* %arrayidx6, align 8 -+ %exitcond = icmp eq i64 %indvars.iv.next, 100 -+ br i1 %exitcond, label %for.inc7, label %for.body3, !llvm.loop !5 -+ -+for.inc7: ; preds = %for.body3 -+ %indvars.iv.next20 = add nuw nsw i64 %indvars.iv19, 1 -+ %exitcond21 = icmp eq i64 %indvars.iv.next20, 100 -+ br i1 %exitcond21, label %for.end9, label %for.cond1.preheader -+ -+for.end9: ; preds = %for.inc7 -+ ret void -+} -+ -+attributes #0 = { nounwind readnone } -+!5 = distinct !{!5, !6, !7} -+!6 = !{!"llvm.loop.vectorize.width", i32 8} -+!7 = !{!"llvm.loop.vectorize.enable", i1 true} -diff --git a/test/Transforms/LoopVectorize/X86/svml-legal-calls.ll b/test/Transforms/LoopVectorize/X86/svml-legal-calls.ll -new file mode 100644 -index 000000000..0524c2841 ---- /dev/null -+++ b/test/Transforms/LoopVectorize/X86/svml-legal-calls.ll -@@ -0,0 +1,513 @@ -+; Check legalization of SVML calls, including intrinsic versions (like @llvm.<fn_name>.<type>). -+ -+; RUN: opt -vector-library=SVML -loop-vectorize -force-vector-width=8 -force-vector-interleave=1 -mattr=avx -S < %s | FileCheck %s -+ -+target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128" -+target triple = "x86_64-unknown-linux-gnu" -+ -+declare double @sin(double) #0 -+declare float @sinf(float) #0 -+declare double @llvm.sin.f64(double) #0 -+declare float @llvm.sin.f32(float) #0 -+ -+declare double @cos(double) #0 -+declare float @cosf(float) #0 -+declare double @llvm.cos.f64(double) #0 -+declare float @llvm.cos.f32(float) #0 -+ -+declare double @pow(double, double) #0 -+declare float @powf(float, float) #0 -+declare double @llvm.pow.f64(double, double) #0 -+declare float @llvm.pow.f32(float, float) #0 -+ -+declare double @exp(double) #0 -+declare float @expf(float) #0 -+declare double @llvm.exp.f64(double) #0 -+declare float @llvm.exp.f32(float) #0 -+ -+declare double @log(double) #0 -+declare float @logf(float) #0 -+declare double @llvm.log.f64(double) #0 -+declare float @llvm.log.f32(float) #0 -+ -+ -+define void @sin_f64(double* nocapture %varray) { -+; CHECK-LABEL: @sin_f64( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_sin4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_sin4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @sin(double %conv) -+ %arrayidx = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @sin_f32(float* nocapture %varray) { -+; CHECK-LABEL: @sin_f32( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_sinf8_ha(<8 x float> 
[[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @sinf(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @sin_f64_intrinsic(double* nocapture %varray) { -+; CHECK-LABEL: @sin_f64_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_sin4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_sin4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @llvm.sin.f64(double %conv) -+ %arrayidx = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @sin_f32_intrinsic(float* nocapture %varray) { -+; CHECK-LABEL: @sin_f32_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_sinf8_ha(<8 x float> [[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @llvm.sin.f32(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @cos_f64(double* nocapture %varray) { -+; CHECK-LABEL: @cos_f64( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_cos4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_cos4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @cos(double %conv) -+ %arrayidx = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @cos_f32(float* nocapture %varray) { -+; CHECK-LABEL: @cos_f32( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_cosf8_ha(<8 x float> [[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @cosf(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label 
%for.body -+ -+for.end: -+ ret void -+} -+ -+define void @cos_f64_intrinsic(double* nocapture %varray) { -+; CHECK-LABEL: @cos_f64_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_cos4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_cos4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @llvm.cos.f64(double %conv) -+ %arrayidx = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @cos_f32_intrinsic(float* nocapture %varray) { -+; CHECK-LABEL: @cos_f32_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_cosf8_ha(<8 x float> [[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @llvm.cos.f32(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @pow_f64(double* nocapture %varray, double* nocapture readonly %exp) { -+; CHECK-LABEL: @pow_f64( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_pow4_ha(<4 x double> [[TMP2:%.*]], <4 x double> [[TMP3:%.*]]) -+; CHECK: [[TMP4:%.*]] = call intel_svmlcc <4 x double> @__svml_pow4_ha(<4 x double> [[TMP5:%.*]], <4 x double> [[TMP6:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %arrayidx = getelementptr inbounds double, double* %exp, i64 %iv -+ %tmp1 = load double, double* %arrayidx, align 4 -+ %tmp2 = tail call double @pow(double %conv, double %tmp1) -+ %arrayidx2 = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %tmp2, double* %arrayidx2, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @pow_f64_intrinsic(double* nocapture %varray, double* nocapture readonly %exp) { -+; CHECK-LABEL: @pow_f64_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_pow4_ha(<4 x double> [[TMP2:%.*]], <4 x double> [[TMP3:%.*]]) -+; CHECK: [[TMP4:%.*]] = call intel_svmlcc <4 x double> @__svml_pow4_ha(<4 x double> [[TMP5:%.*]], <4 x double> [[TMP6:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %arrayidx = getelementptr inbounds double, double* %exp, i64 %iv -+ %tmp1 = load double, double* %arrayidx, align 4 -+ %tmp2 = tail call double @llvm.pow.f64(double %conv, double %tmp1) -+ %arrayidx2 = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %tmp2, double* %arrayidx2, align 4 -+ %iv.next = add nuw nsw i64 
%iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @pow_f32(float* nocapture %varray, float* nocapture readonly %exp) { -+; CHECK-LABEL: @pow_f32( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_powf8_ha(<8 x float> [[TMP2:%.*]], <8 x float> [[WIDE_LOAD:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %arrayidx = getelementptr inbounds float, float* %exp, i64 %iv -+ %tmp1 = load float, float* %arrayidx, align 4 -+ %tmp2 = tail call float @powf(float %conv, float %tmp1) -+ %arrayidx2 = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %tmp2, float* %arrayidx2, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @pow_f32_intrinsic(float* nocapture %varray, float* nocapture readonly %exp) { -+; CHECK-LABEL: @pow_f32_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_powf8_ha(<8 x float> [[TMP2:%.*]], <8 x float> [[TMP3:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %arrayidx = getelementptr inbounds float, float* %exp, i64 %iv -+ %tmp1 = load float, float* %arrayidx, align 4 -+ %tmp2 = tail call float @llvm.pow.f32(float %conv, float %tmp1) -+ %arrayidx2 = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %tmp2, float* %arrayidx2, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @exp_f64(double* nocapture %varray) { -+; CHECK-LABEL: @exp_f64( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_exp4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_exp4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @exp(double %conv) -+ %arrayidx = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @exp_f32(float* nocapture %varray) { -+; CHECK-LABEL: @exp_f32( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_expf8_ha(<8 x float> [[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @expf(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @exp_f64_intrinsic(double* nocapture %varray) { -+; CHECK-LABEL: @exp_f64_intrinsic( -+; CHECK: 
[[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_exp4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_exp4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @llvm.exp.f64(double %conv) -+ %arrayidx = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @exp_f32_intrinsic(float* nocapture %varray) { -+; CHECK-LABEL: @exp_f32_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_expf8_ha(<8 x float> [[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @llvm.exp.f32(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @log_f64(double* nocapture %varray) { -+; CHECK-LABEL: @log_f64( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_log4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_log4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @log(double %conv) -+ %arrayidx = getelementptr inbounds double, double* %varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @log_f32(float* nocapture %varray) { -+; CHECK-LABEL: @log_f32( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_logf8_ha(<8 x float> [[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @logf(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @log_f64_intrinsic(double* nocapture %varray) { -+; CHECK-LABEL: @log_f64_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <4 x double> @__svml_log4_ha(<4 x double> [[TMP2:%.*]]) -+; CHECK: [[TMP3:%.*]] = call intel_svmlcc <4 x double> @__svml_log4_ha(<4 x double> [[TMP4:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to double -+ %call = tail call double @llvm.log.f64(double %conv) -+ %arrayidx = getelementptr inbounds double, double* 
%varray, i64 %iv -+ store double %call, double* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+define void @log_f32_intrinsic(float* nocapture %varray) { -+; CHECK-LABEL: @log_f32_intrinsic( -+; CHECK: [[TMP1:%.*]] = call intel_svmlcc <8 x float> @__svml_logf8_ha(<8 x float> [[TMP2:%.*]]) -+; CHECK: ret void -+; -+entry: -+ br label %for.body -+ -+for.body: -+ %iv = phi i64 [ 0, %entry ], [ %iv.next, %for.body ] -+ %tmp = trunc i64 %iv to i32 -+ %conv = sitofp i32 %tmp to float -+ %call = tail call float @llvm.log.f32(float %conv) -+ %arrayidx = getelementptr inbounds float, float* %varray, i64 %iv -+ store float %call, float* %arrayidx, align 4 -+ %iv.next = add nuw nsw i64 %iv, 1 -+ %exitcond = icmp eq i64 %iv.next, 1000 -+ br i1 %exitcond, label %for.end, label %for.body -+ -+for.end: -+ ret void -+} -+ -+attributes #0 = { nounwind readnone } -+ -diff --git a/test/Transforms/LoopVectorize/X86/svml-legal-codegen.ll b/test/Transforms/LoopVectorize/X86/svml-legal-codegen.ll -new file mode 100644 -index 000000000..007eea7ac ---- /dev/null -+++ b/test/Transforms/LoopVectorize/X86/svml-legal-codegen.ll -@@ -0,0 +1,61 @@ -+; Check that vector codegen splits illegal sin8 call to two sin4 calls on AVX for double datatype. -+; The C code used to generate this test: -+ -+; #include <math.h> -+; -+; void foo(double *a, int N){ -+; int i; -+; #pragma clang loop vectorize_width(8) -+; for (i=0;i<N;i++){ -+; a[i] = sin(i); -+; } -+; } -+ -+; RUN: opt -O2 -vector-library=SVML -loop-vectorize -force-vector-width=8 -mattr=avx -S < %s | FileCheck %s -+ -+; CHECK: [[I1:%.*]] = sitofp <8 x i32> [[I0:%.*]] to <8 x double> -+; CHECK-NEXT: [[S1:%shuffle.*]] = shufflevector <8 x double> [[I1]], <8 x double> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 3> -+; CHECK-NEXT: [[I2:%.*]] = call fast intel_svmlcc <4 x double> @__svml_sin4(<4 x double> [[S1]]) -+; CHECK-NEXT: [[S2:%shuffle.*]] = shufflevector <8 x double> [[I1]], <8 x double> undef, <4 x i32> <i32 4, i32 5, i32 6, i32 7> -+; CHECK-NEXT: [[I3:%.*]] = call fast intel_svmlcc <4 x double> @__svml_sin4(<4 x double> [[S2]]) -+; CHECK-NEXT: [[comb:%combined.*]] = shufflevector <4 x double> [[I2]], <4 x double> [[I3]], <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7> -+; CHECK: store <8 x double> [[comb]], <8 x double>* [[TMP:%.*]], align 8 -+ -+ -+target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128" -+target triple = "x86_64-unknown-linux-gnu" -+ -+; Function Attrs: nounwind uwtable -+define dso_local void @foo(double* nocapture %a, i32 %N) local_unnamed_addr #0 { -+entry: -+ %cmp5 = icmp sgt i32 %N, 0 -+ br i1 %cmp5, label %for.body.preheader, label %for.end -+ -+for.body.preheader: ; preds = %entry -+ %wide.trip.count = zext i32 %N to i64 -+ br label %for.body -+ -+for.body: ; preds = %for.body, %for.body.preheader -+ %indvars.iv = phi i64 [ 0, %for.body.preheader ], [ %indvars.iv.next, %for.body ] -+ %0 = trunc i64 %indvars.iv to i32 -+ %conv = sitofp i32 %0 to double -+ %call = tail call fast double @sin(double %conv) #2 -+ %arrayidx = getelementptr inbounds double, double* %a, i64 %indvars.iv -+ store double %call, double* %arrayidx, align 8, !tbaa !2 -+ %indvars.iv.next = add nuw nsw i64 %indvars.iv, 1 -+ %exitcond = icmp eq i64 %indvars.iv.next, %wide.trip.count -+ br i1 %exitcond, label %for.end, label %for.body, !llvm.loop !6 -+ -+for.end: ; preds = %for.body, %entry -+ ret void -+} -+ -+; 
Function Attrs: nounwind -+declare dso_local double @sin(double) local_unnamed_addr #1 -+ -+!2 = !{!3, !3, i64 0} -+!3 = !{!"double", !4, i64 0} -+!4 = !{!"omnipotent char", !5, i64 0} -+!5 = !{!"Simple C/C++ TBAA"} -+!6 = distinct !{!6, !7} -+!7 = !{!"llvm.loop.vectorize.width", i32 8} -diff --git a/utils/TableGen/CMakeLists.txt b/utils/TableGen/CMakeLists.txt -index dbca62c5c..93f46127a 100644 ---- a/utils/TableGen/CMakeLists.txt -+++ b/utils/TableGen/CMakeLists.txt -@@ -45,6 +45,7 @@ add_tablegen(llvm-tblgen LLVM - SearchableTableEmitter.cpp - SubtargetEmitter.cpp - SubtargetFeatureInfo.cpp -+ SVMLEmitter.cpp - TableGen.cpp - Types.cpp - X86DisassemblerTables.cpp -diff --git a/utils/TableGen/SVMLEmitter.cpp b/utils/TableGen/SVMLEmitter.cpp -new file mode 100644 -index 000000000..8800ca827 ---- /dev/null -+++ b/utils/TableGen/SVMLEmitter.cpp -@@ -0,0 +1,110 @@ -+//===------ SVMLEmitter.cpp - Generate SVML function variants -------------===// -+// -+// The LLVM Compiler Infrastructure -+// -+// This file is distributed under the University of Illinois Open Source -+// License. See LICENSE.TXT for details. -+// -+//===----------------------------------------------------------------------===// -+// -+// This tablegen backend emits the scalar to svml function map for TLI. -+// -+//===----------------------------------------------------------------------===// -+ -+#include "CodeGenTarget.h" -+#include "llvm/Support/Format.h" -+#include "llvm/TableGen/Error.h" -+#include "llvm/TableGen/Record.h" -+#include "llvm/TableGen/TableGenBackend.h" -+#include <map> -+#include <vector> -+ -+using namespace llvm; -+ -+#define DEBUG_TYPE "SVMLVariants" -+#include "llvm/Support/Debug.h" -+ -+namespace { -+ -+class SVMLVariantsEmitter { -+ -+ RecordKeeper &Records; -+ -+private: -+ void emitSVMLVariants(raw_ostream &OS); -+ -+public: -+ SVMLVariantsEmitter(RecordKeeper &R) : Records(R) {} -+ -+ void run(raw_ostream &OS); -+}; -+} // End anonymous namespace -+ -+/// \brief Emit the set of SVML variant function names. -+// The default is to emit the high accuracy SVML variants until a mechanism is -+// introduced to allow a selection of different variants through precision -+// requirements specified by the user. This code generates mappings to svml -+// that are in the scalar form of llvm intrinsics, math library calls, or the -+// finite variants of math library calls. -+void SVMLVariantsEmitter::emitSVMLVariants(raw_ostream &OS) { -+ -+ const unsigned MinSinglePrecVL = 4; -+ const unsigned MaxSinglePrecVL = 16; -+ const unsigned MinDoublePrecVL = 2; -+ const unsigned MaxDoublePrecVL = 8; -+ -+ OS << "#ifdef GET_SVML_VARIANTS\n"; -+ -+ for (const auto &D : Records.getAllDerivedDefinitions("SvmlVariant")) { -+ StringRef SvmlVariantNameStr = D->getName(); -+ // Single Precision SVML -+ for (unsigned VL = MinSinglePrecVL; VL <= MaxSinglePrecVL; VL *= 2) { -+ // Emit the scalar math library function to svml function entry. -+ OS << "{\"" << SvmlVariantNameStr << "f" << "\", "; -+ OS << "\"" << "__svml_" << SvmlVariantNameStr << "f" << VL << "\", " -+ << VL << "},\n"; -+ -+ // Emit the scalar intrinsic to svml function entry. -+ OS << "{\"" << "llvm." << SvmlVariantNameStr << ".f32" << "\", "; -+ OS << "\"" << "__svml_" << SvmlVariantNameStr << "f" << VL << "\", " -+ << VL << "},\n"; -+ -+ // Emit the finite math library function to svml function entry. 
-+ OS << "{\"__" << SvmlVariantNameStr << "f_finite" << "\", "; -+ OS << "\"" << "__svml_" << SvmlVariantNameStr << "f" << VL << "\", " -+ << VL << "},\n"; -+ } -+ -+ // Double Precision SVML -+ for (unsigned VL = MinDoublePrecVL; VL <= MaxDoublePrecVL; VL *= 2) { -+ // Emit the scalar math library function to svml function entry. -+ OS << "{\"" << SvmlVariantNameStr << "\", "; -+ OS << "\"" << "__svml_" << SvmlVariantNameStr << VL << "\", " << VL -+ << "},\n"; -+ -+ // Emit the scalar intrinsic to svml function entry. -+ OS << "{\"" << "llvm." << SvmlVariantNameStr << ".f64" << "\", "; -+ OS << "\"" << "__svml_" << SvmlVariantNameStr << VL << "\", " << VL -+ << "},\n"; -+ -+ // Emit the finite math library function to svml function entry. -+ OS << "{\"__" << SvmlVariantNameStr << "_finite" << "\", "; -+ OS << "\"" << "__svml_" << SvmlVariantNameStr << VL << "\", " -+ << VL << "},\n"; -+ } -+ } -+ -+ OS << "#endif // GET_SVML_VARIANTS\n\n"; -+} -+ -+void SVMLVariantsEmitter::run(raw_ostream &OS) { -+ emitSVMLVariants(OS); -+} -+ -+namespace llvm { -+ -+void EmitSVMLVariants(RecordKeeper &RK, raw_ostream &OS) { -+ SVMLVariantsEmitter(RK).run(OS); -+} -+ -+} // End llvm namespace -diff --git a/utils/TableGen/TableGen.cpp b/utils/TableGen/TableGen.cpp -index bdb963c15..076759731 100644 ---- a/utils/TableGen/TableGen.cpp -+++ b/utils/TableGen/TableGen.cpp -@@ -54,6 +54,7 @@ enum ActionType { - GenRegisterBank, - GenExegesis, - GenAutomata, -+ GenSVMLVariants, - }; - - namespace llvm { -@@ -122,7 +123,9 @@ cl::opt<ActionType> Action( - "Generate registers bank descriptions"), - clEnumValN(GenExegesis, "gen-exegesis", - "Generate llvm-exegesis tables"), -- clEnumValN(GenAutomata, "gen-automata", "Generate generic automata"))); -+ clEnumValN(GenAutomata, "gen-automata", "Generate generic automata"), -+ clEnumValN(GenSVMLVariants, "gen-svml", -+ "Generate SVML variant function names"))); - - cl::OptionCategory PrintEnumsCat("Options for -print-enums"); - cl::opt<std::string> Class("class", cl::desc("Print Enum list for this class"), -@@ -247,6 +250,9 @@ bool LLVMTableGenMain(raw_ostream &OS, RecordKeeper &Records) { - case GenAutomata: - EmitAutomata(Records, OS); - break; -+ case GenSVMLVariants: -+ EmitSVMLVariants(Records, OS); -+ break; - } - - return false; -diff --git a/utils/TableGen/TableGenBackends.h b/utils/TableGen/TableGenBackends.h -index 9eef77a45..2c4385286 100644 ---- a/utils/TableGen/TableGenBackends.h -+++ b/utils/TableGen/TableGenBackends.h -@@ -90,6 +90,7 @@ void EmitX86FoldTables(RecordKeeper &RK, raw_ostream &OS); - void EmitRegisterBank(RecordKeeper &RK, raw_ostream &OS); - void EmitExegesis(RecordKeeper &RK, raw_ostream &OS); - void EmitAutomata(RecordKeeper &RK, raw_ostream &OS); -+void EmitSVMLVariants(RecordKeeper &RK, raw_ostream &OS); - - } // End llvm namespace - -diff --git a/utils/vim/syntax/llvm.vim b/utils/vim/syntax/llvm.vim -index 487a37b4b..89095f48f 100644 ---- a/utils/vim/syntax/llvm.vim -+++ b/utils/vim/syntax/llvm.vim -@@ -96,6 +96,7 @@ syn keyword llvmKeyword - \ inreg - \ inteldialect - \ intel_ocl_bicc -+ \ intel_svmlcc - \ internal - \ linkonce - \ linkonce_odr --- -2.21.1 (Apple Git-122.3) - diff --git a/Golden_Repo/l/LLVM/llvm-10.0.1-gcc-9.3.0-drop-visibility-inlines.patch b/Golden_Repo/l/LLVM/llvm-10.0.1-gcc-9.3.0-drop-visibility-inlines.patch deleted file mode 100644 index 61ef0cf43eb421ed1d811c17bd02145b68f5f929..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LLVM/llvm-10.0.1-gcc-9.3.0-drop-visibility-inlines.patch +++ /dev/null @@ 
-1,17 +0,0 @@ -diff -ruN llvm-10.0.1.src.orig/cmake/modules/HandleLLVMOptions.cmake llvm-10.0.1.src/cmake/modules/HandleLLVMOptions.cmake ---- llvm-10.0.1.src.orig/cmake/modules/HandleLLVMOptions.cmake 2020-07-07 18:21:37.000000000 +0200 -+++ llvm-10.0.1.src/cmake/modules/HandleLLVMOptions.cmake 2020-09-02 18:44:17.280675916 +0200 -@@ -267,13 +267,6 @@ - endif() - endif() - --if(NOT WIN32 AND NOT CYGWIN AND NOT (${CMAKE_SYSTEM_NAME} MATCHES "AIX" AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")) -- # MinGW warns if -fvisibility-inlines-hidden is used. -- # GCC on AIX warns if -fvisibility-inlines-hidden is used. -- check_cxx_compiler_flag("-fvisibility-inlines-hidden" SUPPORTS_FVISIBILITY_INLINES_HIDDEN_FLAG) -- append_if(SUPPORTS_FVISIBILITY_INLINES_HIDDEN_FLAG "-fvisibility-inlines-hidden" CMAKE_CXX_FLAGS) --endif() -- - if(CMAKE_SIZEOF_VOID_P EQUAL 8 AND MINGW) - add_definitions( -D_FILE_OFFSET_BITS=64 ) - endif() diff --git a/Golden_Repo/l/LMDB/LMDB-0.9.24-GCCcore-10.3.0.eb b/Golden_Repo/l/LMDB/LMDB-0.9.24-GCCcore-10.3.0.eb deleted file mode 100644 index 2ee129ad0336d044576d75f78b63f302d0f1ea25..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LMDB/LMDB-0.9.24-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'MakeCp' - -name = 'LMDB' -version = '0.9.24' - -homepage = 'https://symas.com/lmdb' -description = """LMDB is a fast, memory-efficient database. With memory-mapped files, it has the read performance - of a pure in-memory database while retaining the persistence of standard disk-based databases.""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/LMDB/lmdb/archive/'] -sources = ['%(name)s_%(version)s.tar.gz'] -checksums = ['44602436c52c29d4f301f55f6fd8115f945469b868348e3cddaf91ab2473ea26'] - -builddependencies = [('binutils', '2.36.1')] - -buildopts = 'CC="$CC" OPT="$CFLAGS"' - -runtest = 'test' - -files_to_copy = [ - (['lmdb.h', 'midl.h'], 'include'), - (['mdb_copy', 'mdb_dump', 'mdb_load', 'mdb_stat'], 'bin'), - (['liblmdb.a', 'liblmdb.%s' % SHLIB_EXT], 'lib'), -] - -sanity_check_paths = { - 'files': ['bin/mdb_copy', 'bin/mdb_dump', 'bin/mdb_load', 'bin/mdb_stat', 'include/lmdb.h', - 'include/midl.h', 'lib/liblmdb.a', 'lib/liblmdb.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/LMDB/LMDB-0.9.24-GCCcore-9.3.0.eb b/Golden_Repo/l/LMDB/LMDB-0.9.24-GCCcore-9.3.0.eb deleted file mode 100644 index 15e9841f451691d32f5dff5bc024084aa6bf47d0..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LMDB/LMDB-0.9.24-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'MakeCp' - -name = 'LMDB' -version = '0.9.24' - -homepage = 'https://symas.com/lmdb' -description = """LMDB is a fast, memory-efficient database. 
With memory-mapped files, it has the read performance - of a pure in-memory database while retaining the persistence of standard disk-based databases.""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/LMDB/lmdb/archive/'] -sources = ['%(name)s_%(version)s.tar.gz'] -checksums = ['44602436c52c29d4f301f55f6fd8115f945469b868348e3cddaf91ab2473ea26'] - -builddependencies = [('binutils', '2.34')] - -buildopts = 'CC="$CC" OPT="$CFLAGS"' - -runtest = 'test' - -files_to_copy = [ - (['lmdb.h', 'midl.h'], 'include'), - (['mdb_copy', 'mdb_dump', 'mdb_load', 'mdb_stat'], 'bin'), - (['liblmdb.a', 'liblmdb.%s' % SHLIB_EXT], 'lib'), -] - -sanity_check_paths = { - 'files': ['bin/mdb_copy', 'bin/mdb_dump', 'bin/mdb_load', 'bin/mdb_stat', 'include/lmdb.h', - 'include/midl.h', 'lib/liblmdb.a', 'lib/liblmdb.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/LibTIFF/LibTIFF-4.1.0-GCCcore-10.3.0.eb b/Golden_Repo/l/LibTIFF/LibTIFF-4.1.0-GCCcore-10.3.0.eb deleted file mode 100644 index 1bd726c18ca32a3b5078e99f0b4990c2a8e0c8ef..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LibTIFF/LibTIFF-4.1.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch>, Alan O'Cais (JSC) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of -# the policy: -# http://hpcbios.readthedocs.org/en/latest/ -## -easyblock = 'ConfigureMake' - -name = 'LibTIFF' -version = '4.1.0' - -homepage = 'http://www.simplesystems.org/libtiff/' -description = """ - tiff: Library and tools for reading and writing TIFF data files -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'http://download.osgeo.org/libtiff/', - 'ftp://ftp.remotesensing.org/pub/libtiff/', -] -sources = ['tiff-%(version)s.tar.gz'] - -builddependencies = [('binutils', '2.36.1')] - -configopts = '--enable-ld-version-script' - -sanity_check_paths = { - 'files': ['bin/tiffinfo'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/LibTIFF/LibTIFF-4.1.0-GCCcore-9.3.0.eb b/Golden_Repo/l/LibTIFF/LibTIFF-4.1.0-GCCcore-9.3.0.eb deleted file mode 100644 index 8f880df722d5ff05916f24eaea1b7ba911187789..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LibTIFF/LibTIFF-4.1.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch>, Alan O'Cais (JSC) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of -# the policy: -# http://hpcbios.readthedocs.org/en/latest/ -## -easyblock = 'ConfigureMake' - -name = 'LibTIFF' -version = '4.1.0' - -homepage = 'http://www.simplesystems.org/libtiff/' -description = """ - tiff: Library and tools for reading and writing TIFF data files -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [ - 'http://download.osgeo.org/libtiff/', - 'ftp://ftp.remotesensing.org/pub/libtiff/', -] -sources = ['tiff-%(version)s.tar.gz'] - -builddependencies = [('binutils', '2.34')] - -configopts = '--enable-ld-version-script' - 
-sanity_check_paths = { - 'files': ['bin/tiffinfo'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/Libint/Libint-2.7.0-beta.6-iompi-2020_cp2k_lmax5.eb b/Golden_Repo/l/Libint/Libint-2.7.0-beta.6-iompi-2020_cp2k_lmax5.eb deleted file mode 100644 index b34da178d72507da669089385557289e98871390..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/Libint/Libint-2.7.0-beta.6-iompi-2020_cp2k_lmax5.eb +++ /dev/null @@ -1,55 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Libint' -version = '2.7.0-beta.6' -versionsuffix = '_cp2k_lmax5' - -homepage = 'https://github.com/evaleev/libint' -description = '''Libint library is used to evaluate the traditional (electron repulsion) and certain novel two-body - matrix elements (integrals) over Cartesian Gaussian functions used in modern atomic and molecular theory. - Libint is configured for maximum angular momentum Lmax=5 suitable for CP2K 7.1''' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True, 'cstd': 'c++11'} - -source_urls = ['https://github.com/evaleev/libint/archive'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('Autotools', '20200321'), - ('GMP', '6.2.0'), - ('Boost', '1.73.0'), - ('Eigen', '3.3.7'), - ('Python', '3.8.5'), - ('CMake', '3.18.0'), -] - -separate_build_dir = False - -local_expdir = 'libint_temp' -local_l = 5 -local_compiler_configopts = '--enable-eri=1 --enable-eri2=1 --enable-eri3=1 --with-opt-am=3 --enable-generic-code ' -local_compiler_configopts += '--disable-unrolling --enable-shared --with-libint-exportdir=%s ' % local_expdir -local_compiler_configopts += '--with-max-am={} '.format(local_l) -local_compiler_configopts += '--with-eri-max-am={},{} '.format(local_l, local_l - 1) -local_compiler_configopts += '--with-eri2-max-am={},{} '.format(local_l + 1, local_l + 2) -local_compiler_configopts += '--with-eri3-max-am={},{} '.format(local_l + 1, local_l + 2) - -preconfigopts = './autogen.sh && mkdir objects && pushd objects && ../configure %s && ' % local_compiler_configopts -preconfigopts += 'make -j 24 export && popd && tar xvf objects/%s.tgz && pushd %s && ' % (local_expdir, local_expdir) -preconfigopts += 'CMAKE_PREFIX_PATH=$EBROOTEIGEN:$CMAKE_PREFIX_PATH ' - -configopts = '-DENABLE_FORTRAN=ON -DBUILD_SHARED_LIBS=ON' - -prebuildopts = 'pushd %s && ' % local_expdir -preinstallopts = prebuildopts -pretestopts = prebuildopts - -runtest = 'check' - -sanity_check_paths = { - 'files': ['include/libint2.h', 'include/libint2.hpp', 'include/libint_f.mod', 'lib/libint2.%s' % SHLIB_EXT], - 'dirs': ['include', 'include/libint2', 'lib', 'share'] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/l/Libint/Libint-2.7.0-beta.6-ipsmpi-2020_cp2k_lmax5.eb b/Golden_Repo/l/Libint/Libint-2.7.0-beta.6-ipsmpi-2020_cp2k_lmax5.eb deleted file mode 100644 index cab203f7c4a23aa4136ef1686a22279a30c07b40..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/Libint/Libint-2.7.0-beta.6-ipsmpi-2020_cp2k_lmax5.eb +++ /dev/null @@ -1,55 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Libint' -version = '2.7.0-beta.6' -versionsuffix = '_cp2k_lmax5' - -homepage = 'https://github.com/evaleev/libint' -description = '''Libint library is used to evaluate the traditional (electron repulsion) and certain novel two-body - matrix elements (integrals) over Cartesian Gaussian functions used in modern atomic and molecular theory. 
- Libint is configured for maximum angular momentum Lmax=5 suitable for CP2K 7.1''' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'cstd': 'c++11'} - -source_urls = ['https://github.com/evaleev/libint/archive'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('Autotools', '20200321'), - ('GMP', '6.2.0'), - ('Boost', '1.73.0'), - ('Eigen', '3.3.7'), - ('Python', '3.8.5'), - ('CMake', '3.18.0'), -] - -separate_build_dir = False - -local_expdir = 'libint_temp' -local_l = 5 -local_compiler_configopts = '--enable-eri=1 --enable-eri2=1 --enable-eri3=1 --with-opt-am=3 --enable-generic-code ' -local_compiler_configopts += '--disable-unrolling --enable-shared --with-libint-exportdir=%s ' % local_expdir -local_compiler_configopts += '--with-max-am={} '.format(local_l) -local_compiler_configopts += '--with-eri-max-am={},{} '.format(local_l, local_l - 1) -local_compiler_configopts += '--with-eri2-max-am={},{} '.format(local_l + 1, local_l + 2) -local_compiler_configopts += '--with-eri3-max-am={},{} '.format(local_l + 1, local_l + 2) - -preconfigopts = './autogen.sh && mkdir objects && pushd objects && ../configure %s && ' % local_compiler_configopts -preconfigopts += 'make -j 24 export && popd && tar xvf objects/%s.tgz && pushd %s && ' % (local_expdir, local_expdir) -preconfigopts += 'CMAKE_PREFIX_PATH=$EBROOTEIGEN:$CMAKE_PREFIX_PATH ' - -configopts = '-DENABLE_FORTRAN=ON -DBUILD_SHARED_LIBS=ON' - -prebuildopts = 'pushd %s && ' % local_expdir -preinstallopts = prebuildopts -pretestopts = prebuildopts - -runtest = 'check' - -sanity_check_paths = { - 'files': ['include/libint2.h', 'include/libint2.hpp', 'include/libint_f.mod', 'lib/libint2.%s' % SHLIB_EXT], - 'dirs': ['include', 'include/libint2', 'lib', 'share'] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/l/LittleCMS/LittleCMS-2.11-GCCcore-10.3.0.eb b/Golden_Repo/l/LittleCMS/LittleCMS-2.11-GCCcore-10.3.0.eb deleted file mode 100644 index 516c0a13be36d33014825696ec581242ecda6585..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LittleCMS/LittleCMS-2.11-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'LittleCMS' -version = '2.11' - -homepage = 'http://www.littlecms.com/' -description = """ Little CMS intends to be an OPEN SOURCE small-footprint color management engine, - with special focus on accuracy and performance. 
""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://sourceforge.net/projects/lcms/files/lcms/%(version)s/'] -sources = ['lcms2-%(version)s.tar.gz'] -checksums = ['dc49b9c8e4d7cdff376040571a722902b682a795bf92985a85b48854c270772e'] - -builddependencies = [ - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.36.1'), -] - -dependencies = [('libjpeg-turbo', '2.0.5')] - -sanity_check_paths = { - 'files': ['bin/jpgicc', 'bin/linkicc', 'bin/psicc', 'bin/transicc', 'include/lcms2.h', 'include/lcms2_plugin.h', - 'lib/liblcms2.a', 'lib/liblcms2.%s' % SHLIB_EXT, 'lib/pkgconfig/lcms2.pc'], - 'dirs': ['share/man'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/LittleCMS/LittleCMS-2.11-GCCcore-9.3.0.eb b/Golden_Repo/l/LittleCMS/LittleCMS-2.11-GCCcore-9.3.0.eb deleted file mode 100644 index c2d625f241e1bc9550ded3296ee91ecb96ee443d..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/LittleCMS/LittleCMS-2.11-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'LittleCMS' -version = '2.11' - -homepage = 'http://www.littlecms.com/' -description = """ Little CMS intends to be an OPEN SOURCE small-footprint color management engine, - with special focus on accuracy and performance. """ - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://sourceforge.net/projects/lcms/files/lcms/%(version)s/'] -sources = ['lcms2-%(version)s.tar.gz'] -checksums = ['dc49b9c8e4d7cdff376040571a722902b682a795bf92985a85b48854c270772e'] - -builddependencies = [ - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.34'), -] - -dependencies = [('libjpeg-turbo', '2.0.5')] - -sanity_check_paths = { - 'files': ['bin/jpgicc', 'bin/linkicc', 'bin/psicc', 'bin/transicc', 'include/lcms2.h', 'include/lcms2_plugin.h', - 'lib/liblcms2.a', 'lib/liblcms2.%s' % SHLIB_EXT, 'lib/pkgconfig/lcms2.pc'], - 'dirs': ['share/man'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libcerf/libcerf-1.13-GCCcore-10.3.0.eb b/Golden_Repo/l/libcerf/libcerf-1.13-GCCcore-10.3.0.eb deleted file mode 100644 index 7a791b4b36cc922c18391fd90892456d5dc3d889..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libcerf/libcerf-1.13-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libcerf' -version = '1.13' - -homepage = 'http://apps.jcns.fz-juelich.de/src/libcerf' -description = """libcerf is a self-contained numeric library that provides an efficient and accurate - implementation of complex error functions, along with Dawson, Faddeeva, and Voigt functions. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TGZ] -source_urls = [ - 'http://apps.jcns.fz-juelich.de/src/libcerf/', - 'http://apps.jcns.fz-juelich.de/src/libcerf/old', - 'https://jugit.fz-juelich.de/mlz/libcerf/uploads/924b8d245ad3461107ec630734dfc781/', -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -separate_build_dir = True - -configopts = '-DLIB_MAN=OFF' - -sanity_check_paths = { - 'files': ['lib/libcerf.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'math' diff --git a/Golden_Repo/l/libcerf/libcerf-1.13-GCCcore-9.3.0.eb b/Golden_Repo/l/libcerf/libcerf-1.13-GCCcore-9.3.0.eb deleted file mode 100644 index b7fbf15baf70122e97b246f24f88779dc40a8fd6..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libcerf/libcerf-1.13-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libcerf' -version = '1.13' - -homepage = 'http://apps.jcns.fz-juelich.de/src/libcerf' -description = """libcerf is a self-contained numeric library that provides an efficient and accurate - implementation of complex error functions, along with Dawson, Faddeeva, and Voigt functions. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TGZ] -source_urls = [ - 'http://apps.jcns.fz-juelich.de/src/libcerf/', - 'http://apps.jcns.fz-juelich.de/src/libcerf/old', - 'https://jugit.fz-juelich.de/mlz/libcerf/uploads/924b8d245ad3461107ec630734dfc781/', -] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -separate_build_dir = True - -configopts = '-DLIB_MAN=OFF' - -sanity_check_paths = { - 'files': ['lib/libcerf.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'math' diff --git a/Golden_Repo/l/libcroco/libcroco-0.6.13-GCCcore-10.3.0.eb b/Golden_Repo/l/libcroco/libcroco-0.6.13-GCCcore-10.3.0.eb deleted file mode 100644 index f27a14eeefa8427c4ac1ea7b53544a3a760e23c0..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libcroco/libcroco-0.6.13-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libcroco' -version = '0.6.13' - -homepage = 'https://github.com/GNOME/libcroco' -description = """Libcroco is a standalone css2 parsing and manipulation library.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_XZ] -source_urls = ['http://ftp.gnome.org/pub/GNOME/sources/libcroco/%(version_major_minor)s/'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libxml2', '2.9.10'), - ('GLib', '2.64.4'), -] - -sanity_check_paths = { - 'files': ['bin/csslint-%(version_major_minor)s', 'lib/libcroco-%%(version_major_minor)s.%s' % SHLIB_EXT, - 'lib/libcroco-%(version_major_minor)s.a'], - 'dirs': ['include/libcroco-%(version_major_minor)s', 'share'] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libcroco/libcroco-0.6.13-GCCcore-9.3.0.eb b/Golden_Repo/l/libcroco/libcroco-0.6.13-GCCcore-9.3.0.eb deleted file mode 100644 index 2bf460972c429e9c78e6ece5509f482d940d427a..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libcroco/libcroco-0.6.13-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libcroco' -version = '0.6.13' - -homepage = 'https://github.com/GNOME/libcroco' -description = """Libcroco is a standalone css2 parsing and 
manipulation library.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_XZ] -source_urls = ['http://ftp.gnome.org/pub/GNOME/sources/libcroco/%(version_major_minor)s/'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libxml2', '2.9.10'), - ('GLib', '2.64.4'), -] - -sanity_check_paths = { - 'files': ['bin/csslint-%(version_major_minor)s', 'lib/libcroco-%%(version_major_minor)s.%s' % SHLIB_EXT, - 'lib/libcroco-%(version_major_minor)s.a'], - 'dirs': ['include/libcroco-%(version_major_minor)s', 'share'] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libctl/libctl-4.5.0-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/l/libctl/libctl-4.5.0-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 75a65cf618de24226922643514c4cbe10361df73..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libctl/libctl-4.5.0-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,19 +0,0 @@ -easyblock = 'ConfigureMake' -name = 'libctl' -version = '4.5.0' - -homepage = 'http://ab-initio.mit.edu/libctl' -description = """libctl is a free Guile-based library implementing flexible control files for scientific simulations.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'optarch': True} - -source_urls = ['https://github.com/stevengj/libctl/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('guile', '2.2.4') -] - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libctl/libctl-4.5.0-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/l/libctl/libctl-4.5.0-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index c5a2010872bdd8600c2591e3b6b2802c12fb7e3d..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libctl/libctl-4.5.0-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = 'ConfigureMake' -name = 'libctl' -version = '4.5.0' - -homepage = 'http://ab-initio.mit.edu/libctl' -description = """libctl is a free Guile-based library implementing flexible control files for scientific simulations.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} -toolchainopts = {'optarch': True} - -source_urls = ['https://github.com/stevengj/libctl/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('guile', '2.2.4') -] - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libdap/libdap-3.20.6-GCCcore-10.3.0.eb b/Golden_Repo/l/libdap/libdap-3.20.6-GCCcore-10.3.0.eb deleted file mode 100644 index 0567801025473c9a8bb575ecec66a715e5360d6d..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libdap/libdap-3.20.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libdap' -version = '3.20.6' - -homepage = 'http://opendap.org/download/libdap' -description = """A C++ SDK which contains an implementation of DAP 2.0 - and DAP 4.0. This includes both Client- and Server-side support classes. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://www.opendap.org/pub/source/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('Bison', '3.7.6'), - ('flex', '2.6.4'), - ('util-linux', '2.36'), - ('binutils', '2.36.1'), -] - -dependencies = [ - ('cURL', '7.71.1'), - ('libxml2-python', '2.9.10', '-Python-3.8.5'), -] - -sanity_check_paths = { - 'files': ['bin/getdap', 'bin/getdap4', 'bin/dap-config', 'lib/libdap.a', 'lib/libdap.%s' % SHLIB_EXT], - 'dirs': ['include'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libdap/libdap-3.20.6-GCCcore-9.3.0.eb b/Golden_Repo/l/libdap/libdap-3.20.6-GCCcore-9.3.0.eb deleted file mode 100644 index b6d1c14fc09693ecdb3e6bdba5e35d3bcd2f5cce..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libdap/libdap-3.20.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libdap' -version = '3.20.6' - -homepage = 'http://opendap.org/download/libdap' -description = """A C++ SDK which contains an implementation of DAP 2.0 - and DAP 4.0. This includes both Client- and Server-side support classes. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://www.opendap.org/pub/source/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('Bison', '3.6.4'), - ('flex', '2.6.4'), - ('util-linux', '2.36'), - ('binutils', '2.34'), -] - -dependencies = [ - ('cURL', '7.71.1'), - ('libxml2-python', '2.9.10', '-Python-3.8.5'), -] - -sanity_check_paths = { - 'files': ['bin/getdap', 'bin/getdap4', 'bin/dap-config', 'lib/libdap.a', 'lib/libdap.%s' % SHLIB_EXT], - 'dirs': ['include'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libdrm/libdrm-2.4.106-GCCcore-10.3.0.eb b/Golden_Repo/l/libdrm/libdrm-2.4.106-GCCcore-10.3.0.eb deleted file mode 100644 index dcfee3062ac350282ebabbca91e5031da7106daf..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libdrm/libdrm-2.4.106-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'libdrm' -version = '2.4.106' - -homepage = 'https://dri.freedesktop.org' -description = """Direct Rendering Manager runtime library.""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - - -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = ['https://dri.freedesktop.org/libdrm/'] -sources = [SOURCELOWER_TAR_XZ] -checksums = ['92d8ac54429b171e087e61c2894dc5399fe6a549b1fbba09fa6a3cb9d4e57bd4'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), -] -dependencies = [('X11', '20200222')] - -# installing manpages requires an extra build dependency (docbook xsl) -configopts = '-Dman-pages=false' - -sanity_check_paths = { - 'files': ['lib/libdrm.%s' % SHLIB_EXT, 'include/libdrm/drm.h'], - 'dirs': ['include', 'lib'], -} - - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libdrm/libdrm-2.4.99-GCCcore-9.3.0.eb b/Golden_Repo/l/libdrm/libdrm-2.4.99-GCCcore-9.3.0.eb deleted file mode 100644 index df8c7bf06aed4631edf74d156244c12ddb8b79a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libdrm/libdrm-2.4.99-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libdrm' -version = '2.4.99' - -homepage = 'http://dri.freedesktop.org' -description = 'Direct Rendering Manager runtime library.' 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = ['http://dri.freedesktop.org/libdrm/'] -sources = [SOURCELOWER_TAR_GZ] - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('X11', '20200222'), -] - -sanity_check_paths = { - 'files': ['include/xf86drm.h', 'include/xf86drmMode.h', 'lib/libdrm_intel.%s' % SHLIB_EXT, - 'lib/libdrm_radeon.%s' % SHLIB_EXT, 'lib/libdrm.%s' % SHLIB_EXT, 'lib/libkms.%s' % SHLIB_EXT], - 'dirs': ['include/libdrm', 'include/libkms', 'lib/pkgconfig'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libdwarf/libdwarf-20201020-GCCcore-10.3.0.eb b/Golden_Repo/l/libdwarf/libdwarf-20201020-GCCcore-10.3.0.eb deleted file mode 100644 index 0977e24e4b223f6c5c175b7a875b486633972803..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libdwarf/libdwarf-20201020-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libdwarf' -version = '20201020' - -homepage = 'http://www.prevanders.net/dwarf.html' -description = """The DWARF Debugging Information Format is of interest to programmers working on compilers -and debuggers (and anyone interested in reading or writing DWARF information)) -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = ['http://www.prevanders.net'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('libelf', '0.8.13'), -] - -configopts = "--enable-shared " - -sanity_check_paths = { - 'files': [], - 'dirs': ["bin", "lib", "include"] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/l/libdwarf/libdwarf-20201020-GCCcore-9.3.0.eb b/Golden_Repo/l/libdwarf/libdwarf-20201020-GCCcore-9.3.0.eb deleted file mode 100644 index 21067600dc7b331285582323c2d8da6ada3cf5fd..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libdwarf/libdwarf-20201020-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libdwarf' -version = '20201020' - -homepage = 'http://www.prevanders.net/dwarf.html' -description = """The DWARF Debugging Information Format is of interest to programmers working on compilers -and debuggers (and anyone interested in reading or writing DWARF information)) -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = ['http://www.prevanders.net'] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('libelf', '0.8.13'), -] - -configopts = "--enable-shared " - -sanity_check_paths = { - 'files': [], - 'dirs': ["bin", "lib", "include"] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/l/libelf/libelf-0.8.13-GCCcore-10.3.0.eb b/Golden_Repo/l/libelf/libelf-0.8.13-GCCcore-10.3.0.eb deleted file mode 100644 index 60ae3534d1d2d5f90541ee8cacec1cb46ca3e7e0..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libelf/libelf-0.8.13-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libelf' -version = '0.8.13' - -homepage = 'https://web.archive.org/web/20190223180146/http://www.mr511.de/software/english.html' -# The original existed here http://www.mr511.de/software/english.html' -# The only available source code is for an earlier version at https://github.com/WolfgangSt/libelf -description = """libelf is a free ELF object file access library -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 
'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [ - 'https://web.archive.org/web/20170808201535/http://www.mr511.de/software/', - 'https://fossies.org/linux/misc/old/' -] -checksums = ['591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -modextrapaths = {'CPATH': 'include/libelf'} - -sanity_check_paths = { - 'files': ['lib/libelf.a', 'lib/libelf.%s' % SHLIB_EXT, 'lib/libelf.so.0', 'include/libelf/libelf.h'], - 'dirs': ['lib/pkgconfig'] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/l/libelf/libelf-0.8.13-GCCcore-9.3.0.eb b/Golden_Repo/l/libelf/libelf-0.8.13-GCCcore-9.3.0.eb deleted file mode 100644 index 82ed13c9ac5e0690f06383460ea7cc7f2ad3193e..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libelf/libelf-0.8.13-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libelf' -version = '0.8.13' - -homepage = 'https://web.archive.org/web/20190223180146/http://www.mr511.de/software/english.html' -# The original existed here http://www.mr511.de/software/english.html' -# The only available source code is for an earlier version at https://github.com/WolfgangSt/libelf -description = """libelf is a free ELF object file access library -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [ - 'https://web.archive.org/web/20170808201535/http://www.mr511.de/software/', - 'https://fossies.org/linux/misc/old/' -] -checksums = ['591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d'] - -builddependencies = [ - ('binutils', '2.34') -] - -modextrapaths = {'CPATH': 'include/libelf'} - -sanity_check_paths = { - 'files': ['lib/libelf.a', 'lib/libelf.%s' % SHLIB_EXT, 'lib/libelf.so.0', 'include/libelf/libelf.h'], - 'dirs': ['lib/pkgconfig'] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/l/libepoxy/libepoxy-1.5.4-GCCcore-10.3.0.eb b/Golden_Repo/l/libepoxy/libepoxy-1.5.4-GCCcore-10.3.0.eb deleted file mode 100644 index 34809b7a3b1ba1959ddbb190e80f68874293838b..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libepoxy/libepoxy-1.5.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libepoxy' -version = '1.5.4' - -homepage = 'https://github.com/anholt/libepoxy' -description = """ - Epoxy is a library for handling OpenGL function pointer management for you -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/anholt/%(name)s/archive/'] -sources = ['%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('OpenGL', '2020'), -] - -preconfigopts = "NOCONFIGURE=1 ./autogen.sh && " -configopts = '--enable-egl=yes ' - -sanity_check_paths = { - 'files': ['include/epoxy/common.h', 'lib/libepoxy.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libepoxy/libepoxy-1.5.4-GCCcore-9.3.0.eb b/Golden_Repo/l/libepoxy/libepoxy-1.5.4-GCCcore-9.3.0.eb deleted file mode 100644 index 824353ec4245f9245f2be498a6b30e7c67523d62..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libepoxy/libepoxy-1.5.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libepoxy' -version = '1.5.4' - -homepage = 'https://github.com/anholt/libepoxy' -description = """ - Epoxy is a library for 
handling OpenGL function pointer management for you -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/anholt/%(name)s/archive/'] -sources = ['%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('OpenGL', '2020'), -] - -preconfigopts = "NOCONFIGURE=1 ./autogen.sh && " -configopts = '--enable-egl=yes ' - -sanity_check_paths = { - 'files': ['include/epoxy/common.h', 'lib/libepoxy.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libevent/libevent-2.1.12-GCCcore-10.3.0.eb b/Golden_Repo/l/libevent/libevent-2.1.12-GCCcore-10.3.0.eb deleted file mode 100644 index 71060955e82f74f703ba9592c87a017fce672738..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libevent/libevent-2.1.12-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libevent' -version = '2.1.12' - -homepage = 'https://libevent.org/' - -description = """The libevent API provides a mechanism to execute a callback function when a specific -event occurs on a file descriptor or after a timeout has been reached. -Furthermore, libevent also support callbacks due to signals or regular timeouts. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://github.com/%(name)s/%(name)s/releases/download/release-%(version)s-stable/'] -sources = ['%(name)s-%(version)s-stable.tar.gz'] -checksums = ['92e6de1be9ec176428fd2367677e61ceffc2ee1cb119035037a27d346b0403bb'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ['bin/event_rpcgen.py', 'include/event.h', 'include/event2/event.h', - 'lib/libevent_core.%s' % SHLIB_EXT, 'lib/pkgconfig/libevent.pc'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libevent/libevent-2.1.12-GCCcore-9.3.0.eb b/Golden_Repo/l/libevent/libevent-2.1.12-GCCcore-9.3.0.eb deleted file mode 100644 index aaf12570f706f58dc4e478eb79a4e1bfd0a99f59..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libevent/libevent-2.1.12-GCCcore-9.3.0.eb +++ /dev/null @@ -1,20 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libevent' -version = '2.1.12' - -homepage = 'http://libevent.org/' -description = """The libevent API provides a mechanism to execute a callback function when a specific -event occurs on a file descriptor or after a timeout has been reached. -Furthermore, libevent also support callbacks due to signals or regular timeouts. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -source_urls = ['https://github.com/%(name)s/%(name)s/releases/download/release-%(version)s-stable/'] -sources = ['%(name)s-%(version)s-stable.tar.gz'] - -builddependencies = [('binutils', '2.34')] - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libffi/libffi-3.3-GCCcore-10.3.0.eb b/Golden_Repo/l/libffi/libffi-3.3-GCCcore-10.3.0.eb deleted file mode 100644 index d10c77cb3fb2e6404d9a75f95185d7923e7d3032..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libffi/libffi-3.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libffi' -version = '3.3' - -homepage = 'http://sourceware.org/libffi/' -description = """The libffi library provides a portable, high level programming interface to various calling -conventions. This allows a programmer to call any function specified by a call interface description at run-time. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'ftp://sourceware.org/pub/libffi/', - 'http://www.mirrorservice.org/sites/sourceware.org/pub/libffi/', -] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [('binutils', '2.36.1')] - -sanity_check_paths = { - 'files': [('lib/libffi.%s' % SHLIB_EXT, 'lib64/libffi.%s' % SHLIB_EXT), - ('lib/libffi.a', 'lib64/libffi.a')], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libffi/libffi-3.3-GCCcore-9.3.0.eb b/Golden_Repo/l/libffi/libffi-3.3-GCCcore-9.3.0.eb deleted file mode 100644 index 0d9f4de192f3fe56ce4867105ea99c47230868e3..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libffi/libffi-3.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libffi' -version = '3.3' - -homepage = 'http://sourceware.org/libffi/' -description = """The libffi library provides a portable, high level programming interface to various calling -conventions. This allows a programmer to call any function specified by a call interface description at run-time. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [ - 'ftp://sourceware.org/pub/libffi/', - 'http://www.mirrorservice.org/sites/sourceware.org/pub/libffi/', -] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [('binutils', '2.34')] - -sanity_check_paths = { - 'files': [('lib/libffi.%s' % SHLIB_EXT, 'lib64/libffi.%s' % SHLIB_EXT), - ('lib/libffi.a', 'lib64/libffi.a')], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libgd/libgd-2.3.0-GCCcore-10.3.0.eb b/Golden_Repo/l/libgd/libgd-2.3.0-GCCcore-10.3.0.eb deleted file mode 100644 index 718db0cf89911ae57f33784802c7f24d9bc88f67..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libgd/libgd-2.3.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libgd' -version = '2.3.0' - -homepage = 'https://github.com/libgd' -description = """GD is an open source code library for the dynamic creation of -images by programmers. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'https://github.com/libgd/libgd/releases/download/gd-%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('X11', '20200222'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ["lib/libgd.a", "lib/libgd.%s" % SHLIB_EXT], - 'dirs': ["bin", "include"], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libgd/libgd-2.3.0-GCCcore-9.3.0.eb b/Golden_Repo/l/libgd/libgd-2.3.0-GCCcore-9.3.0.eb deleted file mode 100644 index a4fffff59e5f4ebc52b0eb8abdce010f46583c18..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libgd/libgd-2.3.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libgd' -version = '2.3.0' - -homepage = 'https://github.com/libgd' -description = """GD is an open source code library for the dynamic creation of -images by programmers. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/libgd/libgd/releases/download/gd-%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('X11', '20200222'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ["lib/libgd.a", "lib/libgd.%s" % SHLIB_EXT], - 'dirs': ["bin", "include"], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libgeotiff/libgeotiff-1.6.0-GCCcore-10.3.0.eb b/Golden_Repo/l/libgeotiff/libgeotiff-1.6.0-GCCcore-10.3.0.eb deleted file mode 100644 index de50eac251ba1b056c42f01d85cdfbd00565e159..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libgeotiff/libgeotiff-1.6.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libgeotiff' -version = '1.6.0' - -homepage = 'https://trac.osgeo.org/geotiff/' -description = """Library for reading and writing coordinate system information - from/to GeoTIFF files""" - -site_contacts = 'Alexandre Strube <a.strube@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://download.osgeo.org/geotiff/libgeotiff'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('PROJ', '7.1.0'), - ('libjpeg-turbo', '2.0.5'), - ('zlib', '1.2.11'), - ('SQLite', '3.32.3'), - ('LibTIFF', '4.1.0'), - ('cURL', '7.71.1'), -] - -configopts = '--with-libtiff=$EBROOTLIBTIFF' -configopts += ' --with-proj=$EBROOTPROJ ' -configopts += ' --with-zlib=$EBROOTZLIB' -configopts += ' --with-jpeg=$EBROOTLIBJPEGMINTURBO' - -sanity_check_paths = { - 'files': ['bin/listgeo', 'lib/libgeotiff.a', - 'lib/libgeotiff.%s' % SHLIB_EXT], - 'dirs': ['include', 'share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libgeotiff/libgeotiff-1.6.0-GCCcore-9.3.0.eb b/Golden_Repo/l/libgeotiff/libgeotiff-1.6.0-GCCcore-9.3.0.eb deleted file mode 100644 index 8ed70321074faf7b29e0d7c39239bfaeac61a893..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libgeotiff/libgeotiff-1.6.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libgeotiff' -version = '1.6.0' - -homepage = 'https://trac.osgeo.org/geotiff/' -description = """Library for reading and writing coordinate system information - from/to GeoTIFF files""" - -site_contacts = 'Alexandre 
Strube <a.strube@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://download.osgeo.org/geotiff/libgeotiff'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('PROJ', '7.1.0'), - ('libjpeg-turbo', '2.0.5'), - ('zlib', '1.2.11'), - ('SQLite', '3.32.3'), - ('LibTIFF', '4.1.0'), - ('cURL', '7.71.1'), -] - -configopts = '--with-libtiff=$EBROOTLIBTIFF' -configopts += ' --with-proj=$EBROOTPROJ ' -configopts += ' --with-zlib=$EBROOTZLIB' -configopts += ' --with-jpeg=$EBROOTLIBJPEGMINTURBO' - -sanity_check_paths = { - 'files': ['bin/listgeo', 'lib/libgeotiff.a', - 'lib/libgeotiff.%s' % SHLIB_EXT], - 'dirs': ['include', 'share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libiconv/libiconv-1.16-GCCcore-10.3.0.eb b/Golden_Repo/l/libiconv/libiconv-1.16-GCCcore-10.3.0.eb deleted file mode 100644 index 822b37b687a19e5b168b2a0dbecf62ae2725bdd9..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libiconv/libiconv-1.16-GCCcore-10.3.0.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libiconv' -version = '1.16' - -homepage = 'https://www.gnu.org/software/libiconv' -description = """Libiconv converts from one character encoding to another -through Unicode conversion""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [('binutils', '2.36.1')] - -sanity_check_paths = { - 'files': ['bin/iconv', 'include/iconv.h', 'include/libcharset.h', 'include/localcharset.h', - 'lib/libcharset.a', 'lib/libcharset.%s' % SHLIB_EXT, 'lib/libiconv.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libiconv/libiconv-1.16-GCCcore-9.3.0.eb b/Golden_Repo/l/libiconv/libiconv-1.16-GCCcore-9.3.0.eb deleted file mode 100644 index 6ab6f5ef26ce9d0c8f1d623744d93f1381e09762..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libiconv/libiconv-1.16-GCCcore-9.3.0.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libiconv' -version = '1.16' - -homepage = 'https://www.gnu.org/software/libiconv' -description = """Libiconv converts from one character encoding to another -through Unicode conversion""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [('binutils', '2.34')] - -sanity_check_paths = { - 'files': ['bin/iconv', 'include/iconv.h', 'include/libcharset.h', 'include/localcharset.h', - 'lib/libcharset.a', 'lib/libcharset.%s' % SHLIB_EXT, 'lib/libiconv.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libjpeg-turbo/libjpeg-turbo-2.0.5-GCCcore-10.3.0.eb b/Golden_Repo/l/libjpeg-turbo/libjpeg-turbo-2.0.5-GCCcore-10.3.0.eb deleted file mode 100644 index e6ca596d8f5785b560622d86d61beb58f3ccb782..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libjpeg-turbo/libjpeg-turbo-2.0.5-GCCcore-10.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libjpeg-turbo' -version = '2.0.5' - -homepage = 'http://sourceforge.net/projects/libjpeg-turbo/' - -description = """ -libjpeg-turbo is a fork of the original IJG libjpeg which uses SIMD to -accelerate baseline JPEG compression and decompression. libjpeg is a library -that implements JPEG image encoding, decoding and transcoding. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['16f8f6f2715b3a38ab562a84357c793dd56ae9899ce130563c72cd93d8357b5d'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('binutils', '2.36.1'), -] - -dependencies = [ - ('NASM', '2.15.03'), -] - -configopts = ' -G"Unix Makefiles" -DWITH_JPEG8=1' - -runtest = "test" - -sanity_check_paths = { - 'files': ['bin/cjpeg', 'bin/djpeg', 'bin/jpegtran', 'bin/rdjpgcom', - 'bin/tjbench', 'bin/wrjpgcom', 'lib/libjpeg.a', - 'lib/libjpeg.%s' % SHLIB_EXT, 'lib/libturbojpeg.a', - 'lib/libturbojpeg.%s' % SHLIB_EXT], - 'dirs': ['include', 'share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libjpeg-turbo/libjpeg-turbo-2.0.5-GCCcore-9.3.0.eb b/Golden_Repo/l/libjpeg-turbo/libjpeg-turbo-2.0.5-GCCcore-9.3.0.eb deleted file mode 100644 index 4a57fb1c768dbb7e1a4e56746f6a1777caa995a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libjpeg-turbo/libjpeg-turbo-2.0.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libjpeg-turbo' -version = '2.0.5' - -homepage = 'http://sourceforge.net/projects/libjpeg-turbo/' - -description = """ -libjpeg-turbo is a fork of the original IJG libjpeg which uses SIMD to -accelerate baseline JPEG compression and decompression. libjpeg is a library -that implements JPEG image encoding, decoding and transcoding. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['16f8f6f2715b3a38ab562a84357c793dd56ae9899ce130563c72cd93d8357b5d'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34'), -] - -dependencies = [ - ('NASM', '2.15.03'), -] - -configopts = ' -G"Unix Makefiles" -DWITH_JPEG8=1' - -runtest = "test" - -sanity_check_paths = { - 'files': ['bin/cjpeg', 'bin/djpeg', 'bin/jpegtran', 'bin/rdjpgcom', - 'bin/tjbench', 'bin/wrjpgcom', 'lib/libjpeg.a', - 'lib/libjpeg.%s' % SHLIB_EXT, 'lib/libturbojpeg.a', - 'lib/libturbojpeg.%s' % SHLIB_EXT], - 'dirs': ['include', 'share'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libmatheval/003-guile2.0.patch b/Golden_Repo/l/libmatheval/003-guile2.0.patch deleted file mode 100644 index 3f592cadb0dee1e8fb2386791d9c67632e7943be..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libmatheval/003-guile2.0.patch +++ /dev/null @@ -1,402 +0,0 @@ -Description: Increase precision of floating point tests - guile-2.0 has increased the precision of the floating point maths returns, - so the test suite needs to allow for the correct values to be returned - with higher precision. Thanks to Dave Pigott <dave.pigott@linaro.org> - Also adapt the configure script to build against guile-2.0 - patch from - Hilo Bengen <bengen@debian.org>. - . - libmatheval (1.1.11+dfsg-1.1) unstable; urgency=low - . - * Non-maintainer upload. - * Migrate to guile-2.0 - patch from Hilo Bengen, - extended to support higher precision of return values - by guile-2.0. (Closes: #746013) -Author: Neil Williams <codehelp@debian.org> -Bug-Debian: https://bugs.debian.org/746013 - ---- - ---- libmatheval-1.1.11+dfsg.orig/configure.in -+++ libmatheval-1.1.11+dfsg/configure.in -@@ -60,10 +60,11 @@ dnl Checks for library functions. - AC_CHECK_FUNCS([bzero memset], [break]) - - dnl Additional Guile feature checks. 
-+CFLAGS="$CFLAGS $GUILE_CFLAGS" - AC_CHECK_TYPE([scm_t_bits], [AC_DEFINE([HAVE_SCM_T_BITS], [1], [Define to 1 if you have the `scm_t_bits' type.])], [], [#include <libguile.h>]) --AC_CHECK_LIB([guile], [scm_c_define_gsubr], [AC_DEFINE([HAVE_SCM_C_DEFINE_GSUBR], [1], [Define to 1 if you have the `scm_c_define_gsubr' function.])], [], [$GUILE_LDFLAGS]) --AC_CHECK_LIB([guile], [scm_make_gsubr], [AC_DEFINE([HAVE_SCM_MAKE_GSUBR], [1], [Define to 1 if you have the `scm_make_gsubr' function.])], [], [$GUILE_LDFLAGS]) --AC_CHECK_LIB([guile], [scm_num2dbl], [AC_DEFINE([HAVE_SCM_NUM2DBL], [1], [Define to 1 if you have the `scm_num2dbl' function.])], [], [$GUILE_LDFLAGS]) -+AC_CHECK_LIB([guile-2.0], [scm_c_define_gsubr], [AC_DEFINE([HAVE_SCM_C_DEFINE_GSUBR], [1], [Define to 1 if you have the `scm_c_define_gsubr' function.])], [], [$GUILE_LDFLAGS]) -+AC_CHECK_LIB([guile-2.0], [scm_make_gsubr], [AC_DEFINE([HAVE_SCM_MAKE_GSUBR], [1], [Define to 1 if you have the `scm_make_gsubr' function.])], [], [$GUILE_LDFLAGS]) -+AC_CHECK_LIB([guile-2.0], [scm_num2dbl], [AC_DEFINE([HAVE_SCM_NUM2DBL], [1], [Define to 1 if you have the `scm_num2dbl' function.])], [], [$GUILE_LDFLAGS]) - - AC_CONFIG_FILES([Makefile doc/Makefile lib/Makefile]) - AC_OUTPUT(libmatheval.pc) ---- libmatheval-1.1.11+dfsg.orig/tests/basics.at -+++ libmatheval-1.1.11+dfsg/tests/basics.at -@@ -62,7 +62,7 @@ AT_DATA([basics.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh basics.scm], [ignore], [10.0], [ignore]) -+AT_CHECK([matheval.sh basics.scm], [ignore], [10.000000000000002], [ignore]) - - AT_DATA([basics.scm], - [[ -@@ -70,7 +70,7 @@ AT_DATA([basics.scm], - (display (evaluator-evaluate-x f 0.7)) - ]]) - --AT_CHECK([matheval.sh basics.scm], [ignore], [0.220966666722528], [ignore]) -+AT_CHECK([matheval.sh basics.scm], [ignore], [0.22096666672252796], [ignore]) - - AT_DATA([basics.scm], - [[ -@@ -78,7 +78,7 @@ AT_DATA([basics.scm], - (display (evaluator-evaluate-x-y f 0.4 -0.7)) - ]]) - --AT_CHECK([matheval.sh basics.scm], [ignore], [-1.14962406520749], [ignore]) -+AT_CHECK([matheval.sh basics.scm], [ignore], [-1.1496240652074883], [ignore]) - - AT_DATA([basics.scm], - [[ -@@ -86,7 +86,7 @@ AT_DATA([basics.scm], - (display (evaluator-evaluate-x-y-z f 11.2 0.41 -0.66)) - ]]) - --AT_CHECK([matheval.sh basics.scm], [ignore], [3.99876152571934], [ignore]) -+AT_CHECK([matheval.sh basics.scm], [ignore], [3.9987615257193383], [ignore]) - - AT_DATA([basics.scm], - [[ ---- libmatheval-1.1.11+dfsg.orig/tests/constants.at -+++ libmatheval-1.1.11+dfsg/tests/constants.at -@@ -29,7 +29,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [2.71828182845905], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [2.718281828459045], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -37,7 +37,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [1.44269504088896], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [1.4426950408889634], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -45,7 +45,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [0.434294481903252], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [0.4342944819032518], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -53,7 +53,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh 
constant.scm], [ignore], [0.693147180559945], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [0.6931471805599453], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -61,7 +61,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [2.30258509299405], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [2.302585092994046], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -69,7 +69,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [3.14159265358979], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [3.141592653589793], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -77,7 +77,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [1.5707963267949], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [1.5707963267948966], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -85,7 +85,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [0.785398163397448], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [0.7853981633974483], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -93,7 +93,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [0.318309886183791], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [0.3183098861837907], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -101,7 +101,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [0.636619772367581], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [0.6366197723675814], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -109,7 +109,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [1.12837916709551], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [1.1283791670955126], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -117,7 +117,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [1.4142135623731], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [1.4142135623730951], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -125,7 +125,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [0.707106781186548], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [0.7071067811865476], [ignore]) - - AT_DATA([constant.scm], - [[ -@@ -133,7 +133,7 @@ AT_DATA([constant.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh constant.scm], [ignore], [10.0], [ignore]) -+AT_CHECK([matheval.sh constant.scm], [ignore], [10.000000000000002], [ignore]) - - AT_DATA([constant.scm], - [[ ---- libmatheval-1.1.11+dfsg.orig/tests/functions.at -+++ libmatheval-1.1.11+dfsg/tests/functions.at -@@ -29,7 +29,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [2.71828182845905], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [2.718281828459045], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -80,7 +80,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.841470984807897], [ignore]) 
-+AT_CHECK([matheval.sh function.scm], [ignore], [0.8414709848078965], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -97,7 +97,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.54030230586814], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.5403023058681398], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -114,7 +114,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.5574077246549], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.5574077246549023], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -131,7 +131,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.642092615934331], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.6420926159343306], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -148,7 +148,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.85081571768093], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.8508157176809255], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -165,7 +165,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.18839510577812], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.1883951057781212], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -182,7 +182,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.5707963267949], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.5707963267948966], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -216,7 +216,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.785398163397448], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.7853981633974483], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -233,7 +233,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.785398163397448], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.7853981633974483], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -267,7 +267,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.5707963267949], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.5707963267948966], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -284,7 +284,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.1752011936438], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.1752011936438014], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -301,7 +301,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.54308063481524], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.5430806348152437], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -318,7 +318,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.761594155955765], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.7615941559557649], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -335,7 +335,7 @@ 
AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [1.31303528549933], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [1.3130352854993315], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -352,7 +352,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.648054273663885], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.6480542736638855], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -368,7 +368,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.850918128239322], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.8509181282393216], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -385,7 +385,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.881373587019543], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.8813735870195429], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -419,7 +419,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 0.5)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.549306144334055], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.5493061443340549], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -436,7 +436,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 2)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.549306144334055], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.5493061443340549], [ignore]) - - AT_DATA([function.scm], - [[ -@@ -470,7 +470,7 @@ AT_DATA([function.scm], - (display (evaluator-evaluate-x f 1)) - ]]) - --AT_CHECK([matheval.sh function.scm], [ignore], [0.881373587019543], [ignore]) -+AT_CHECK([matheval.sh function.scm], [ignore], [0.8813735870195429], [ignore]) - - AT_DATA([function.scm], - [[ ---- libmatheval-1.1.11+dfsg.orig/tests/numbers.at -+++ libmatheval-1.1.11+dfsg/tests/numbers.at -@@ -53,6 +53,6 @@ AT_DATA([number.scm], - (display (evaluator-evaluate-x f 0)) - ]]) - --AT_CHECK([matheval.sh number.scm], [ignore], [0.644394014977254], [ignore]) -+AT_CHECK([matheval.sh number.scm], [ignore], [0.6443940149772542], [ignore]) - - AT_CLEANUP diff --git a/Golden_Repo/l/libmatheval/libmatheval-1.1.11-GCCcore-10.3.0.eb b/Golden_Repo/l/libmatheval/libmatheval-1.1.11-GCCcore-10.3.0.eb deleted file mode 100644 index a04f4da874de9c4a63dd5b906ed8a2386556aeeb..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libmatheval/libmatheval-1.1.11-GCCcore-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libmatheval' -version = '1.1.11' - -homepage = 'http://www.gnu.org/software/libmatheval/' -description = """GNU libmatheval is a library (callable from C and Fortran) to parse - and evaluate symbolic expressions input as text. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -patches = [ - '003-guile2.0.patch', - 'libmatheval-1.1.11_fix-matheval-test.patch' -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('flex', '2.6.4'), - ('Bison', '3.7.6'), - ('byacc', '20200330'), - # guile 2.2.X removed scm_num2dbl (among others), which are needed for libmatheval (at least for 1.1.11) - ('guile', '2.0.14'), -] - -configopts = '--with-pic ' - -# fix for guile-config being broken because shebang line contains full path to bin/guile -configopts += 'GUILE_CONFIG="$EBROOTGUILE/bin/guile -e main -s $EBROOTGUILE/bin/guile-config"' - -sanity_check_paths = { - 'files': ['lib/libmatheval.a', 'include/matheval.h'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libmatheval/libmatheval-1.1.11-GCCcore-9.3.0.eb b/Golden_Repo/l/libmatheval/libmatheval-1.1.11-GCCcore-9.3.0.eb deleted file mode 100644 index a732ac2d1600f1a52529c186fbf25d3281b439ed..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libmatheval/libmatheval-1.1.11-GCCcore-9.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libmatheval' -version = '1.1.11' - -homepage = 'http://www.gnu.org/software/libmatheval/' -description = """GNU libmatheval is a library (callable from C and Fortran) to parse - and evaluate symbolic expressions input as text. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -patches = [ - '003-guile2.0.patch', - 'libmatheval-1.1.11_fix-matheval-test.patch' -] - -builddependencies = [ - ('binutils', '2.34'), - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('byacc', '20200330'), - # guile 2.2.X removed scm_num2dbl (among others), which are needed for libmatheval (at least for 1.1.11) - ('guile', '2.0.14'), -] - -configopts = '--with-pic ' - -# fix for guile-config being broken because shebang line contains full path to bin/guile -configopts += 'GUILE_CONFIG="$EBROOTGUILE/bin/guile -e main -s $EBROOTGUILE/bin/guile-config"' - -sanity_check_paths = { - 'files': ['lib/libmatheval.a', 'include/matheval.h'], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libmatheval/libmatheval-1.1.11_fix-matheval-test.patch b/Golden_Repo/l/libmatheval/libmatheval-1.1.11_fix-matheval-test.patch deleted file mode 100644 index 25bb0b17c816e0ef9bd920a0e5f7d9c784aec578..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libmatheval/libmatheval-1.1.11_fix-matheval-test.patch +++ /dev/null @@ -1,32 +0,0 @@ -fix for "matheval.c:37:2: error: #error Neither scm_num2dbl() nor SCM_NUM2DBL available" -patch obtained via https://aur.archlinux.org/cgit/aur.git/tree/removeifndefs.patch?h=libmatheval -diff -aur a/tests/matheval.c b/tests/matheval.c ---- a/tests/matheval.c 2016-03-24 13:55:00.163074189 +0000 -+++ b/tests/matheval.c 2016-03-24 13:52:59.492996682 +0000 -@@ -26,26 +26,6 @@ - #include <matheval.h> - #include "config.h" - --#ifndef HAVE_SCM_T_BITS --typedef long scm_t_bits; --#endif -- --#ifndef HAVE_SCM_NUM2DBL --#ifdef SCM_NUM2DBL --#define scm_num2dbl(x,s) SCM_NUM2DBL(x) --#else --#error Neither scm_num2dbl() nor SCM_NUM2DBL available --#endif --#endif -- --#ifndef HAVE_SCM_C_DEFINE_GSUBR --#ifdef HAVE_SCM_MAKE_GSUBR --#define scm_c_define_gsubr scm_make_gsubr --#else --#error Neither 
scm_c_define_gsubr() nor scm_make_gsubr() available --#endif --#endif -- - static scm_t_bits evaluator_tag; /* Unique identifier for Guile - * objects of evaluator type. */ - diff --git a/Golden_Repo/l/libpciaccess/libpciaccess-0.16-GCCcore-10.3.0.eb b/Golden_Repo/l/libpciaccess/libpciaccess-0.16-GCCcore-10.3.0.eb deleted file mode 100644 index 142d22d246cf8a59d7d75e8308e52a4cadeed43f..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libpciaccess/libpciaccess-0.16-GCCcore-10.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libpciaccess' -version = '0.16' - -homepage = 'https://cgit.freedesktop.org/xorg/lib/libpciaccess/' -description = """Generic PCI access library.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://www.x.org/releases/individual/lib/'] -sources = [SOURCE_TAR_GZ] -checksums = ['84413553994aef0070cf420050aa5c0a51b1956b404920e21b81e96db6a61a27'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), - ('xorg-macros', '1.19.2'), -] - -sanity_check_paths = { - 'files': ['include/pciaccess.h', 'lib/libpciaccess.a'], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'system' diff --git a/Golden_Repo/l/libpciaccess/libpciaccess-0.16-GCCcore-9.3.0.eb b/Golden_Repo/l/libpciaccess/libpciaccess-0.16-GCCcore-9.3.0.eb deleted file mode 100644 index 5a9c81fc7c5579cee7dd356be7be8813a2fd978c..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libpciaccess/libpciaccess-0.16-GCCcore-9.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libpciaccess' -version = '0.16' - -homepage = 'https://cgit.freedesktop.org/xorg/lib/libpciaccess/' -description = """Generic PCI access library.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://www.x.org/releases/individual/lib/'] -sources = [SOURCE_TAR_GZ] -checksums = ['84413553994aef0070cf420050aa5c0a51b1956b404920e21b81e96db6a61a27'] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), - ('xorg-macros', '1.19.2'), -] - -sanity_check_paths = { - 'files': ['include/pciaccess.h', 'lib/libpciaccess.a'], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'system' diff --git a/Golden_Repo/l/libpng/libpng-1.6.37-GCCcore-10.3.0.eb b/Golden_Repo/l/libpng/libpng-1.6.37-GCCcore-10.3.0.eb deleted file mode 100644 index e2b20639a25f41e05ee859488da8385f8dac09a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libpng/libpng-1.6.37-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libpng' -version = '1.6.37' - -homepage = 'http://www.libpng.org/pub/png/libpng.html' -description = """libpng is the official PNG reference library -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = "--with-pic" - -local_majminver = ''.join(version.split('.')[:2]) -sanity_check_paths = { - 'files': ['include/pngconf.h', 'include/png.h', 'include/pnglibconf.h', - 'lib/libpng.a', - 'lib/libpng.%s' % SHLIB_EXT, - 'lib/libpng%s.a' % local_majminver, - 'lib/libpng%s.%s' % (local_majminver, SHLIB_EXT)], - 'dirs': ['bin', 'include/libpng%s' % local_majminver, 'share/man'], -} - -moduleclass = 'lib' diff --git 
a/Golden_Repo/l/libpng/libpng-1.6.37-GCCcore-9.3.0.eb b/Golden_Repo/l/libpng/libpng-1.6.37-GCCcore-9.3.0.eb deleted file mode 100644 index ff2700968d29993ceadbc792a4976f0d48dbeda0..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libpng/libpng-1.6.37-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libpng' -version = '1.6.37' - -homepage = 'http://www.libpng.org/pub/png/libpng.html' -description = """libpng is the official PNG reference library -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = "--with-pic" - -local_majminver = ''.join(version.split('.')[:2]) -sanity_check_paths = { - 'files': ['include/pngconf.h', 'include/png.h', 'include/pnglibconf.h', - 'lib/libpng.a', - 'lib/libpng.%s' % SHLIB_EXT, - 'lib/libpng%s.a' % local_majminver, - 'lib/libpng%s.%s' % (local_majminver, SHLIB_EXT)], - 'dirs': ['bin', 'include/libpng%s' % local_majminver, 'share/man'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libreadline/libreadline-8.0-GCCcore-10.3.0.eb b/Golden_Repo/l/libreadline/libreadline-8.0-GCCcore-10.3.0.eb deleted file mode 100644 index 43fc9b75f98353f00f408591c9652f2796c1fe04..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libreadline/libreadline-8.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libreadline' -version = '8.0' - -homepage = 'https://tiswww.case.edu/php/chet/readline/rltop.html' -description = """ -The GNU Readline library provides a set of functions for use by applications -that allow users to edit command lines as they are typed in. Both Emacs and -vi editing modes are available. The Readline library includes additional -functions to maintain a list of previously-entered command lines, to recall -and perhaps reedit those lines, and perform csh-like history expansion on -previous commands. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://ftp.gnu.org/gnu/readline'] -sources = ['readline-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), -] -dependencies = [ - ('ncurses', '6.2'), -] - -# for the termcap symbols, use EB ncurses -buildopts = "SHLIB_LIBS='-lncurses'" - -sanity_check_paths = { - 'files': ['lib/libreadline.a', 'lib/libhistory.a'] + - ['include/readline/%s' % x - for x in ['chardefs.h', 'history.h', 'keymaps.h', 'readline.h', - 'rlconf.h', 'rlstdc.h', 'rltypedefs.h', 'tilde.h']], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libreadline/libreadline-8.0-GCCcore-9.3.0.eb b/Golden_Repo/l/libreadline/libreadline-8.0-GCCcore-9.3.0.eb deleted file mode 100644 index 42f5a0d25f36af98200bc340843dd854ba924fce..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libreadline/libreadline-8.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libreadline' -version = '8.0' - -homepage = 'http://cnswww.cns.cwru.edu/php/chet/readline/rltop.html' -description = """The GNU Readline library provides a set of functions for use -by applications that allow users to edit command lines as they are typed in. -Both Emacs and vi editing modes are available. 
The Readline library includes -additional functions to maintain a list of previously-entered command lines, to -recall and perhaps reedit those lines, and perform csh-like history expansion -on previous commands. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = ['readline-%(version)s.tar.gz'] -source_urls = ['http://ftp.gnu.org/gnu/readline'] - -dependencies = [ - ('ncurses', '6.2'), - ('binutils', '2.34') -] - -# for the termcap symbols, use EB ncurses -preconfigopts = "env LDFLAGS='-lncurses'" - -sanity_check_paths = { - 'files': ['lib/libreadline.a', 'lib/libhistory.a'] + - ['include/readline/%s' % x for x in ['chardefs.h', 'history.h', - 'keymaps.h', 'readline.h', - 'rlconf.h', 'rlstdc.h', - 'rltypedefs.h', 'tilde.h']], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/librsvg/librsvg-2.48.8-GCCcore-10.3.0.eb b/Golden_Repo/l/librsvg/librsvg-2.48.8-GCCcore-10.3.0.eb deleted file mode 100644 index 51f63710e39a301a6c56425b9fff66d38415340d..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/librsvg/librsvg-2.48.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'librsvg' -version = '2.48.8' - -homepage = 'https://wiki.gnome.org/action/show/Projects/LibRsvg' -description = """librsvg is a library to render SVG files using cairo.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_XZ] -source_urls = ['https://download.gnome.org/sources/librsvg/%(version_major_minor)s'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), -] - -dependencies = [ - ('Gdk-Pixbuf', '2.40.0'), - ('libcroco', '0.6.13'), - ('Pango', '1.44.7'), - ('cairo', '1.17.2'), - ('Rust', '1.47.0'), -] - -# This loader wants to install in the directory of Gdk-Pixbuf itself. If we disable it, Gdk-Pixbuf can't manage SVG -# files, which is bad for creating icons -# configopts = '--disable-pixbuf-loader' - -sanity_check_paths = { - 'files': ['bin/rsvg-convert', 'lib/librsvg-%%(version_major)s.%s' % SHLIB_EXT, 'lib/librsvg-2.a'], - 'dirs': ['include/librsvg-2.0', 'share'] -} - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/librsvg/librsvg-2.48.8-GCCcore-9.3.0.eb b/Golden_Repo/l/librsvg/librsvg-2.48.8-GCCcore-9.3.0.eb deleted file mode 100644 index 0a8f1e900cf96138b2ca031430d543a17ad0bb63..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/librsvg/librsvg-2.48.8-GCCcore-9.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'librsvg' -version = '2.48.8' - -homepage = 'https://wiki.gnome.org/action/show/Projects/LibRsvg' -description = """librsvg is a library to render SVG files using cairo.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_XZ] -source_urls = ['https://download.gnome.org/sources/librsvg/%(version_major_minor)s'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - ('GObject-Introspection', '1.64.1'), -] - -dependencies = [ - ('Gdk-Pixbuf', '2.40.0'), - ('libcroco', '0.6.13'), - ('Pango', '1.44.7'), - ('cairo', '1.17.2'), - ('Rust', '1.47.0'), -] - -# This loader wants to install in the directory of Gdk-Pixbuf itself. 
If we disable it, Gdk-Pixbuf can't manage SVG -# files, which is bad for creating icons -# configopts = '--disable-pixbuf-loader' - -sanity_check_paths = { - 'files': ['bin/rsvg-convert', 'lib/librsvg-%%(version_major)s.%s' % SHLIB_EXT, 'lib/librsvg-2.a'], - 'dirs': ['include/librsvg-2.0', 'share'] -} - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libsndfile/libsndfile-1.0.28-GCCcore-10.3.0.eb b/Golden_Repo/l/libsndfile/libsndfile-1.0.28-GCCcore-10.3.0.eb deleted file mode 100644 index 54f9bf82300278775824a2da69db0661f10bd9b4..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libsndfile/libsndfile-1.0.28-GCCcore-10.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libsndfile' -version = '1.0.28' - -homepage = 'http://www.mega-nerd.com/libsndfile' -description = """Libsndfile is a C library for reading and writing files -containing sampled sound (such as MS Windows WAV and the Apple/SGI AIFF format) -through one standard library interface.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://www.mega-nerd.com/libsndfile/files/'] -sources = [SOURCE_TAR_GZ] -checksums = ['1ff33929f042fa333aed1e8923aa628c3ee9e1eb85512686c55092d1e5a9dfa9'] - -builddependencies = [('binutils', '2.36.1')] - -configopts = '--enable-octave=no' - -sanity_check_paths = { - 'files': ['include/sndfile.h', 'include/sndfile.hh', 'lib/libsndfile.a', - 'lib/libsndfile.%s' % SHLIB_EXT], - 'dirs': ['bin'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libsndfile/libsndfile-1.0.28-GCCcore-9.3.0.eb b/Golden_Repo/l/libsndfile/libsndfile-1.0.28-GCCcore-9.3.0.eb deleted file mode 100644 index e6f1b6fbd94dc2b6d0fbb41d3b89675bb0fe6eeb..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libsndfile/libsndfile-1.0.28-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libsndfile' -version = '1.0.28' - -homepage = 'http://www.mega-nerd.com/libsndfile' -description = """Libsndfile is a C library for reading and writing files -containing sampled sound (such as MS Windows WAV and the Apple/SGI AIFF format) -through one standard library interface.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://www.mega-nerd.com/libsndfile/files/'] -sources = [SOURCE_TAR_GZ] -checksums = ['1ff33929f042fa333aed1e8923aa628c3ee9e1eb85512686c55092d1e5a9dfa9'] - -builddependencies = [('binutils', '2.34')] - -configopts = '--enable-octave=no' - -sanity_check_paths = { - 'files': ['include/sndfile.h', 'include/sndfile.hh', 'lib/libsndfile.a', - 'lib/libsndfile.%s' % SHLIB_EXT], - 'dirs': ['bin'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libspatialindex/libspatialindex-1.9.3-GCCcore-10.3.0.eb b/Golden_Repo/l/libspatialindex/libspatialindex-1.9.3-GCCcore-10.3.0.eb deleted file mode 100644 index 8dcdedb097f06647b096622244014773f765f4d6..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libspatialindex/libspatialindex-1.9.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libspatialindex' -version = '1.9.3' - -homepage = 'http://libspatialindex.github.io' -description = """C++ implementation of R*-tree, an MVR-tree and a TPR-tree with C API""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = 
['https://github.com/libspatialindex/libspatialindex/releases/download/%(version)s/'] -sources = ['spatialindex-src-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -sanity_check_paths = { - 'files': ['lib/libspatialindex.so', 'lib/libspatialindex.%s' % SHLIB_EXT], - 'dirs': ['include/spatialindex'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libspatialindex/libspatialindex-1.9.3-GCCcore-9.3.0.eb b/Golden_Repo/l/libspatialindex/libspatialindex-1.9.3-GCCcore-9.3.0.eb deleted file mode 100644 index 72efd2469a9d0cb755a61da906e7b22d9180636a..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libspatialindex/libspatialindex-1.9.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libspatialindex' -version = '1.9.3' - -homepage = 'http://libspatialindex.github.io' -description = """C++ implementation of R*-tree, an MVR-tree and a TPR-tree with C API""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/libspatialindex/libspatialindex/releases/download/%(version)s/'] -sources = ['spatialindex-src-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -sanity_check_paths = { - 'files': ['lib/libspatialindex.so', 'lib/libspatialindex.%s' % SHLIB_EXT], - 'dirs': ['include/spatialindex'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libtirpc/libtirpc-1.3.2-GCCcore-10.3.0.eb b/Golden_Repo/l/libtirpc/libtirpc-1.3.2-GCCcore-10.3.0.eb deleted file mode 100644 index 40027bebec781945b66cbf606a66e109440915e2..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libtirpc/libtirpc-1.3.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libtirpc' -version = '1.3.2' - -homepage = 'https://sourceforge.net/projects/libtirpc/' -description = "Libtirpc is a port of Suns Transport-Independent RPC library to Linux." - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'a.kreuzter@fz-juelich.de' - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCE_TAR_BZ2] -checksums = ['e24eb88b8ce7db3b7ca6eb80115dd1284abc5ec32a8deccfed2224fc2532b9fd'] - -configopts = '--enable-static --enable-shared --disable-gssapi' - -builddependencies = [ - ('binutils', '2.36.1') -] - -sanity_check_paths = { - 'files': ['lib/libtirpc.%s' % (x,) for x in ['a', SHLIB_EXT]], - 'dirs': ['include/tirpc', 'lib'], -} - -modextrapaths = {'CPATH': 'include/tirpc'} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libtool/libtool-2.4.6-GCCcore-10.3.0.eb b/Golden_Repo/l/libtool/libtool-2.4.6-GCCcore-10.3.0.eb deleted file mode 100644 index 66b8afb6e15b2d7f87f5f0d85477d7b65292fe4d..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libtool/libtool-2.4.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libtool' -version = '2.4.6' - -homepage = 'https://www.gnu.org/software/libtool' - -description = """ -GNU libtool is a generic library support script. Libtool hides the complexity -of using shared libraries behind a consistent, portable interface. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['e3bd4d5d3d025a36c21dd6af7ea818a2afcd4dfc1ea5a17b39d7854bcd0c06e3'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['bin/libtool', 'bin/libtoolize', 'lib/libltdl.%s' % SHLIB_EXT], - 'dirs': ['include/libltdl', 'share/libtool/loaders', 'share/man/man1'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libtool/libtool-2.4.6-GCCcore-9.3.0.eb b/Golden_Repo/l/libtool/libtool-2.4.6-GCCcore-9.3.0.eb deleted file mode 100644 index 1eb1e694c28ca00fa01befb31be8d67d8d3ad9b6..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libtool/libtool-2.4.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libtool' -version = '2.4.6' - -homepage = 'http://www.gnu.org/software/libtool' -description = """GNU libtool is a generic library support script. Libtool hides the complexity of using shared libraries - behind a consistent, portable interface. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('M4', '1.4.18'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libtool/libtool-2.4.6.eb b/Golden_Repo/l/libtool/libtool-2.4.6.eb deleted file mode 100644 index dcb791391f834ee5642fc46b2113b754bca968ae..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libtool/libtool-2.4.6.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libtool' -version = '2.4.6' - -homepage = 'http://www.gnu.org/software/libtool' -description = """GNU libtool is a generic library support script. Libtool hides the complexity of using shared libraries - behind a consistent, portable interface. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('M4', '1.4.18'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libunistring/libunistring-0.9.10-GCCcore-10.3.0.eb b/Golden_Repo/l/libunistring/libunistring-0.9.10-GCCcore-10.3.0.eb deleted file mode 100644 index 8a780993cbd20f6d17b3aadd2e0df226a5beefae..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libunistring/libunistring-0.9.10-GCCcore-10.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libunistring' -version = '0.9.10' - -homepage = 'http://www.gnu.org/software/libunistring/' -description = """This library provides functions for manipulating Unicode strings and for manipulating C strings - according to the Unicode standard. 
-""" - -site_contacts = 'a,kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [('binutils', '2.36.1')] - -parallel = 1 - -sanity_check_paths = { - 'files': ['lib/libunistring.a', 'lib/libunistring.%s' % SHLIB_EXT] + - ['include/uni%s.h' % x for x in ['case', 'conv', 'ctype', 'lbrk', 'name', 'norm', - 'stdio', 'str', 'types', 'wbrk', 'width']], - 'dirs': ['include/unistring'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libunistring/libunistring-0.9.10-GCCcore-9.3.0.eb b/Golden_Repo/l/libunistring/libunistring-0.9.10-GCCcore-9.3.0.eb deleted file mode 100644 index 24ccdd439c6dd365ccc5e48c5e54ecca20c7423b..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libunistring/libunistring-0.9.10-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libunistring' -version = '0.9.10' - -homepage = 'http://www.gnu.org/software/libunistring/' -description = """This library provides functions for manipulating Unicode strings and for manipulating C strings - according to the Unicode standard. -""" - -site_contacts = 'a,kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -builddependencies = [('binutils', '2.34')] - -parallel = 1 - -sanity_check_paths = { - 'files': ['lib/libunistring.a', 'lib/libunistring.%s' % SHLIB_EXT] + - ['include/uni%s.h' % x for x in ['case', 'conv', 'ctype', 'lbrk', 'name', 'norm', - 'stdio', 'str', 'types', 'wbrk', 'width']], - 'dirs': ['include/unistring'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libunwind/libunwind-1.4.0-GCCcore-10.3.0.eb b/Golden_Repo/l/libunwind/libunwind-1.4.0-GCCcore-10.3.0.eb deleted file mode 100644 index 27168950d542d71833965b6f354aca3a1267441f..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libunwind/libunwind-1.4.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = "libunwind" -version = "1.4.0" - -homepage = 'http://www.nongnu.org/libunwind/' -description = """The primary goal of libunwind is to define a portable and efficient C programming interface - (API) to determine the call-chain of a program. The API additionally provides the means to manipulate the - preserved (callee-saved) state of each call-frame and to resume execution at any point in the call-chain - (non-local goto). The API supports both local (same-process) and remote (across-process) operation. 
- As such, the API is useful in a number of applications -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [GNU_SAVANNAH_SOURCE] - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('XZ', '5.2.5') -] - -configopts = 'LDFLAGS="$LDFLAGS -Wl,-fuse-ld=bfd" ' -configopts += 'CFLAGS="-fcommon"' # Gentoo bug #706560 - -sanity_check_paths = { - 'files': ["include/libunwind.h", "lib/libunwind.%s" % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libunwind/libunwind-1.4.0-GCCcore-9.3.0.eb b/Golden_Repo/l/libunwind/libunwind-1.4.0-GCCcore-9.3.0.eb deleted file mode 100644 index 56d667948c49b0f786a82104a75ff242bf0c30d8..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libunwind/libunwind-1.4.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = "libunwind" -version = "1.4.0" - -homepage = 'http://www.nongnu.org/libunwind/' -description = """The primary goal of libunwind is to define a portable and efficient C programming interface - (API) to determine the call-chain of a program. The API additionally provides the means to manipulate the - preserved (callee-saved) state of each call-frame and to resume execution at any point in the call-chain - (non-local goto). The API supports both local (same-process) and remote (across-process) operation. - As such, the API is useful in a number of applications -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [GNU_SAVANNAH_SOURCE] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('XZ', '5.2.5') -] - -configopts = 'LDFLAGS="$LDFLAGS -Wl,-fuse-ld=bfd"' - -sanity_check_paths = { - 'files': ["include/libunwind.h", "lib/libunwind.%s" % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libvdwxc/libvdwxc-0.4.0-gpsmpi-2020.eb b/Golden_Repo/l/libvdwxc/libvdwxc-0.4.0-gpsmpi-2020.eb deleted file mode 100644 index 002d721b469dedb3b7339b6812fc57ea1f215627..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libvdwxc/libvdwxc-0.4.0-gpsmpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libvdwxc' -version = '0.4.0' - -homepage = 'https://libvdwxc.org' -description = """libvdwxc is a general library for evaluating energy and potential for -exchange-correlation (XC) functionals from the vdW-DF family that can be used with various -of density functional theory (DFT) codes.""" - -# FFTW depends on mpi? 
-toolchain = {'name': 'gpsmpi', 'version': '2020'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://launchpad.net/libvdwxc/stable/%(version)s/+download/'] -sources = [SOURCE_TAR_GZ] -checksums = ['3524feb5bb2be86b4688f71653502146b181e66f3f75b8bdaf23dd1ae4a56b33'] - -dependencies = [ - ('FFTW', '3.3.8'), -] - -preconfigopts = 'unset CC && unset FC && ' - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['libvdwxc_fdtest', 'libvdwxc_maintest', - 'libvdwxc_q0test', 'libvdwxc_q0test2']] + - ['lib/lib%s.%s' % (x, y) for x in ['vdwxc', 'vdwxcfort'] - for y in ['a', SHLIB_EXT]], - 'dirs': ['include'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libvpx/libvpx-1.9.0-GCCcore-10.3.0.eb b/Golden_Repo/l/libvpx/libvpx-1.9.0-GCCcore-10.3.0.eb deleted file mode 100644 index 8b36f1d32f9e80c313a62017f68fd8f55b6edd5c..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libvpx/libvpx-1.9.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -# Built with EasyBuild version 4.4.0 on 2021-06-21_15-43-00 -easyblock = 'ConfigureMake' - -name = 'libvpx' -version = '1.9.0' - -homepage = 'http://www.webmproject.org' -description = """VPx are open and royalty free video compression formats owned by Google. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/webmproject/libvpx/archive/v%(version)s/'] -sources = ['%(name)s-%(version)s.tar.gz'] -checksums = ['d279c10e4b9316bf11a570ba16c3d55791e1ad6faa4404c67422eb631782c80a'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - ('NASM', '2.15.03'), -] - -dependencies = [ -] - -configopts = '--enable-pic --enable-shared' - -sanity_check_paths = { - # 'lib/libvpx.%s' % SHLIB_EXT], - 'files': ['bin/vpxdec', 'bin/vpxenc', 'include/vpx/vpx_codec.h', 'lib/libvpx.a'], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libvpx/libvpx-1.9.0-GCCcore-9.3.0.eb b/Golden_Repo/l/libvpx/libvpx-1.9.0-GCCcore-9.3.0.eb deleted file mode 100644 index 3b1e192d60aa395a12fe695e7318d7fce4406915..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libvpx/libvpx-1.9.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libvpx' -version = '1.9.0' - -homepage = 'http://www.webmproject.org' -description = """VPx are open and royalty free video compression formats owned by Google. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/webmproject/libvpx/archive/v%(version)s/'] -sources = ['%(name)s-%(version)s.tar.gz'] -checksums = ['d279c10e4b9316bf11a570ba16c3d55791e1ad6faa4404c67422eb631782c80a'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - ('NASM', '2.15.03'), -] - -dependencies = [ -] - -configopts = '--enable-pic --enable-shared' - -sanity_check_paths = { - 'files': ['bin/vpxdec', 'bin/vpxenc', 'include/vpx/vpx_codec.h', 'lib/libvpx.a'], # 'lib/libvpx.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libwebp/libwebp-1.1.0-GCCcore-10.3.0.eb b/Golden_Repo/l/libwebp/libwebp-1.1.0-GCCcore-10.3.0.eb deleted file mode 100644 index 50cd2b6f4957b728f2cbf795e4a13f83697f909a..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libwebp/libwebp-1.1.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libwebp' -version = '1.1.0' - -homepage = 'https://developers.google.com/speed/webp/' -description = """WebP is a modern image format that provides superior -lossless and lossy compression for images on the web. Using WebP, -webmasters and web developers can create smaller, richer images that -make the web faster.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://storage.googleapis.com/downloads.webmproject.org/releases/webp'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['98a052268cc4d5ece27f76572a7f50293f439c17a98e67c4ea0c7ed6f50ef043'] - -builddependencies = [('binutils', '2.36.1')] - -dependencies = [ - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), -] - -configopts = "--enable-libwebpmux" - -sanity_check_paths = { - 'files': ['include/webp/%s' % f for f in ['decode.h', 'demux.h', 'encode.h', 'mux.h', 'mux_types.h', 'types.h']] + - ['lib/lib%s.a' % l for l in ['webp', 'webpdemux', 'webpmux']] + - ['lib/lib%s.%s' % (l, SHLIB_EXT) for l in ['webp', 'webpdemux', 'webpmux']], - 'dirs': ['lib/'] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libwebp/libwebp-1.1.0-GCCcore-9.3.0.eb b/Golden_Repo/l/libwebp/libwebp-1.1.0-GCCcore-9.3.0.eb deleted file mode 100644 index cdeba8b3bd4190a365734a9df4c933a5e46bbfc6..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libwebp/libwebp-1.1.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libwebp' -version = '1.1.0' - -homepage = 'https://developers.google.com/speed/webp/' -description = """WebP is a modern image format that provides superior -lossless and lossy compression for images on the web. 
Using WebP, -webmasters and web developers can create smaller, richer images that -make the web faster.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://storage.googleapis.com/downloads.webmproject.org/releases/webp'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['98a052268cc4d5ece27f76572a7f50293f439c17a98e67c4ea0c7ed6f50ef043'] - -builddependencies = [('binutils', '2.34')] - -dependencies = [ - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), -] - -configopts = "--enable-libwebpmux" - -sanity_check_paths = { - 'files': ['include/webp/%s' % f for f in ['decode.h', 'demux.h', 'encode.h', 'mux.h', 'mux_types.h', 'types.h']] + - ['lib/lib%s.a' % l for l in ['webp', 'webpdemux', 'webpmux']] + - ['lib/lib%s.%s' % (l, SHLIB_EXT) for l in ['webp', 'webpdemux', 'webpmux']], - 'dirs': ['lib/'] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libxc/README.md b/Golden_Repo/l/libxc/README.md deleted file mode 100644 index 8b554daccd9029e27a209691fdab65caa674ff1d..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# libxc - -The default version of `libxc` in the stage `2020` is `4.3.4`. We decided against the newer version `libxc/5.0.0`, as some applications would need to be patched. However, also not every application is working with version `4.3.4`, as the API changed between version 3 and 4. This lead to the fact that we need two different version: - -- `libxc/3.0.1` -- `libxc/4.3.4` - -Both versions are installed for the following toolchains: - -- `GCC/9.3.0` -- `iccifort-2020.2.254-GCC-9.3.0` - -## ABINIT - -`ABINIT/8.X` is only working with `libxc/3.0.1` due to changes in the API. Starting with `ABINIT/9.X` version 4.3.0 or later is supported. - -## Quantum ESPRESSO - -While Quantum ESPRESSO 6.6 would work with `libxc/5.0.0`, QE would require a patch. From the Quantum ESPRESSO User Guide (https://www.quantum-espresso.org/Doc/user_guide.pdf): - -**Note for version 5.0.0:** the `f03` interfaces are no longer available in `libxc` 5.0.0. They have been reintroduced in the current develop version. Version 5.0.0 is still usable, but, before compiling Quantum ESPRESSO, a string replacement is necessary, namely `‘xcf03’` must berepalced with `‘xcf90’` everywhere in the following files: `funct.f90, xcldalsdadrivers.f90, xcggadrivers.f90, xcmggadrivers.f90, dmxcdrivers.f90` and `dgcxcdrivers.f90` in `Modules` folder and `xctestqelibxc.f90` in `PP/src` folder. - -## Note for future Stage 2021 - -Check if all application can work with the newest version of `libxc`, e.g. 5.0.0. One common version would be desirable. diff --git a/Golden_Repo/l/libxc/libxc-3.0.1-GCC-9.3.0.eb b/Golden_Repo/l/libxc/libxc-3.0.1-GCC-9.3.0.eb deleted file mode 100644 index c50654fe0634458014e8daea9605e157abf00352..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/libxc-3.0.1-GCC-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxc' -version = '3.0.1' - -homepage = 'http://www.tddft.org/programs/octopus/wiki/index.php/Libxc' -description = """ -Libxc is a library of exchange-correlation functionals for density-functional theory. -The aim is to provide a portable, well tested and reliable set of exchange and correlation functionals. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -# Results for some functionals (e.g. 
mgga_c_tpss) deviate with too aggressive optimization settings. -# Tests also fail with Intel Compilers on Haswell when optarch is enabled. -toolchainopts = {'lowopt': True, 'optarch': False} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.tddft.org/programs/octopus/down.php?file=libxc/%(version)s/'] - -configopts = '--enable-static --enable-shared --enable-fortran' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libxc%s.%s' % (x, y) for x in ['', 'f90'] for y in ['a', SHLIB_EXT]], - 'dirs': ['include'], -} - -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libxc/libxc-3.0.1-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/l/libxc/libxc-3.0.1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 137dabaf5efaecb0ed6053909c3c6beb410cc736..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/libxc-3.0.1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxc' -version = '3.0.1' - -homepage = 'http://www.tddft.org/programs/octopus/wiki/index.php/Libxc' -description = """ -Libxc is a library of exchange-correlation functionals for density-functional theory. -The aim is to provide a portable, well tested and reliable set of exchange and correlation functionals. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -# Results for some functionals (e.g. mgga_c_tpss) deviate with too aggressive optimization settings. -# Tests also fail with Intel Compilers on Haswell when optarch is enabled. -toolchainopts = {'lowopt': True, 'optarch': False} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.tddft.org/programs/octopus/down.php?file=libxc/%(version)s/'] - -configopts = '--enable-static --enable-shared --enable-fortran' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libxc%s.%s' % (x, y) for x in ['', 'f90'] for y in ['a', SHLIB_EXT]], - 'dirs': ['include'], -} - -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libxc/libxc-4.3.4-GCC-10.3.0.eb b/Golden_Repo/l/libxc/libxc-4.3.4-GCC-10.3.0.eb deleted file mode 100644 index 11d132b8952b771911f295342fe9160fb4fda7b7..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/libxc-4.3.4-GCC-10.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxc' -version = '4.3.4' - -homepage = 'http://www.tddft.org/programs/octopus/wiki/index.php/Libxc' -description = """Libxc is a library of exchange-correlation functionals for density-functional theory. - The aim is to provide a portable, well tested and reliable set of exchange and correlation functionals. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -# Results for some functionals (e.g. mgga_c_tpss) deviate with too aggressive optimization settings. -# Tests also fail with Intel Compilers on Haswell when optarch is enabled. 
-toolchainopts = {'lowopt': True, 'optarch': False} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.tddft.org/programs/libxc/down.php?file=%(version)s/'] - -configopts = '--enable-static --enable-shared --enable-fortran' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libxc%s.%s' % (x, y) for x in ['', 'f90'] for y in ['a', SHLIB_EXT]], - 'dirs': ['include'], -} - -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libxc/libxc-4.3.4-GCC-9.3.0.eb b/Golden_Repo/l/libxc/libxc-4.3.4-GCC-9.3.0.eb deleted file mode 100644 index 4722ebdd9309cc9fadc0b44e3d32729ef9f4d60b..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/libxc-4.3.4-GCC-9.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxc' -version = '4.3.4' - -homepage = 'http://www.tddft.org/programs/octopus/wiki/index.php/Libxc' -description = """Libxc is a library of exchange-correlation functionals for density-functional theory. - The aim is to provide a portable, well tested and reliable set of exchange and correlation functionals. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -# Results for some functionals (e.g. mgga_c_tpss) deviate with too aggressive optimization settings. -# Tests also fail with Intel Compilers on Haswell when optarch is enabled. -toolchainopts = {'lowopt': True, 'optarch': False} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.tddft.org/programs/octopus/down.php?file=libxc/%(version)s/'] - -configopts = '--enable-static --enable-shared --enable-fortran' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libxc%s.%s' % (x, y) for x in ['', 'f90'] for y in ['a', SHLIB_EXT]], - 'dirs': ['include'], -} - -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libxc/libxc-4.3.4-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/l/libxc/libxc-4.3.4-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 05ad5d82b5f89ccc8f9f6496573cf40761c96f80..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/libxc-4.3.4-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxc' -version = '4.3.4' - -homepage = 'http://www.tddft.org/programs/octopus/wiki/index.php/Libxc' -description = """Libxc is a library of exchange-correlation functionals for density-functional theory. - The aim is to provide a portable, well tested and reliable set of exchange and correlation functionals. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -# Results for some functionals (e.g. mgga_c_tpss) deviate with too aggressive optimization settings. -# Tests also fail with Intel Compilers on Haswell when optarch is enabled. 
-toolchainopts = {'lowopt': True, 'optarch': False} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.tddft.org/programs/octopus/down.php?file=libxc/%(version)s/'] - -configopts = '--enable-static --enable-shared --enable-fortran' - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libxc%s.%s' % (x, y) for x in ['', 'f90'] for y in ['a', SHLIB_EXT]], - 'dirs': ['include'], -} - -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libxc/libxc-5.1.5-GCC-10.3.0.eb b/Golden_Repo/l/libxc/libxc-5.1.5-GCC-10.3.0.eb deleted file mode 100644 index ef9b1ce4104dcb7bd13d8f70cd6cc20d331e5621..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/libxc-5.1.5-GCC-10.3.0.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libxc' -version = '5.1.5' - -homepage = 'https://www.tddft.org/programs/libxc' -description = """Libxc is a library of exchange-correlation functionals for density-functional theory. - The aim is to provide a portable, well tested and reliable set of exchange and correlation functionals. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} - -source_urls = [ - 'https://www.tddft.org/programs/libxc/down.php?file=%(version)s/'] -sources = [SOURCE_TAR_GZ] -checksums = ['02e4615a22dc3ec87a23efbd3d9be5bfad2445337140bad1720699571c45c3f9'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Perl', '5.32.0'), -] - -separate_build_dir = True - -local_common_configopts = "-DENABLE_FORTRAN=ON -DENABLE_FORTRAN03=ON -DENABLE_XHOST=OFF" - -# perform iterative build to get both static and shared libraries -configopts = [ - local_common_configopts + ' -DBUILD_SHARED_LIBS=OFF', - local_common_configopts + ' -DBUILD_SHARED_LIBS=ON', -] - -parallel = 1 - -# make sure that built libraries (libxc*.so*) in build directory are picked when running tests -# this is required when RPATH linking is used -pretestopts = "export LD_LIBRARY_PATH=%(builddir)s/easybuild_obj:$LD_LIBRARY_PATH && " - -runtest = 'test' - -sanity_check_paths = { - 'files': ['bin/xc-info'] + - ['lib/libxc%s.%s' % (x, y) for x in ['', 'f03', 'f90'] - for y in ['a', SHLIB_EXT]], - 'dirs': ['include', 'lib/pkgconfig', 'share/cmake/Libxc'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libxc/libxc-5.1.5-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/l/libxc/libxc-5.1.5-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index fd896237c5c0d79795731a4b76a6ba18617e00c6..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxc/libxc-5.1.5-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libxc' -version = '5.1.5' - -homepage = 'https://www.tddft.org/programs/libxc' -description = """Libxc is a library of exchange-correlation functionals for density-functional theory. - The aim is to provide a portable, well tested and reliable set of exchange and correlation functionals. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -source_urls = [ - 'https://www.tddft.org/programs/libxc/down.php?file=%(version)s/'] -sources = [SOURCE_TAR_GZ] -checksums = ['02e4615a22dc3ec87a23efbd3d9be5bfad2445337140bad1720699571c45c3f9'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Perl', '5.32.0'), -] - -separate_build_dir = True - -local_common_configopts = "-DENABLE_FORTRAN=ON -DENABLE_FORTRAN03=ON -DENABLE_XHOST=OFF" - -# perform iterative build to get both static and shared libraries -configopts = [ - local_common_configopts + ' -DBUILD_SHARED_LIBS=OFF', - local_common_configopts + ' -DBUILD_SHARED_LIBS=ON', -] - -parallel = 1 - -# make sure that built libraries (libxc*.so*) in build directory are picked when running tests -# this is required when RPATH linking is used -pretestopts = "export LD_LIBRARY_PATH=%(builddir)s/easybuild_obj:$LD_LIBRARY_PATH && " - -runtest = 'test' - -sanity_check_paths = { - 'files': ['bin/xc-info'] + - ['lib/libxc%s.%s' % (x, y) for x in ['', 'f03', 'f90'] - for y in ['a', SHLIB_EXT]], - 'dirs': ['include', 'lib/pkgconfig', 'share/cmake/Libxc'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/l/libxml2-python/libxml2-python-2.9.10-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/l/libxml2-python/libxml2-python-2.9.10-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 0e6ecc1e6bd10d845998d82ffe96488823bb8abd..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxml2-python/libxml2-python-2.9.10-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,49 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'libxml2-python' -version = '2.9.10' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://xmlsoft.org/' -description = """ - Libxml2 is the XML C parser and toolchain developed for the Gnome project - (but usable outside of the Gnome platform). This is the Python binding.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'http://xmlsoft.org/sources/', - 'http://xmlsoft.org/sources/old/' -] -sources = ['libxml2-%(version)s.tar.gz'] -patches = ['libxml2-2.9.7_fix-hardcoded-paths.patch'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('Python', '3.8.5'), - ('libxml2', version), - ('libiconv', '1.16'), -] - -start_dir = 'python' - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -options = {'modulename': 'libxml2'} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libxml2-python/libxml2-python-2.9.10-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/l/libxml2-python/libxml2-python-2.9.10-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 164eaf98db098b8726718091f7f8562192f02b47..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxml2-python/libxml2-python-2.9.10-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,49 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'libxml2-python' -version = '2.9.10' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://xmlsoft.org/' -description = """ - Libxml2 is the XML C parser and toolchain developed for the Gnome project - (but usable outside of the Gnome platform). 
This is the Python binding.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'http://xmlsoft.org/sources/', - 'http://xmlsoft.org/sources/old/' -] -sources = ['libxml2-%(version)s.tar.gz'] -patches = ['libxml2-2.9.7_fix-hardcoded-paths.patch'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('Python', '3.8.5'), - ('libxml2', version), - ('libiconv', '1.16'), -] - -start_dir = 'python' - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -options = {'modulename': 'libxml2'} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libxml2/libxml2-2.9.10-GCCcore-10.3.0.eb b/Golden_Repo/l/libxml2/libxml2-2.9.10-GCCcore-10.3.0.eb deleted file mode 100644 index 075ef1f277a74672580c62bc6c2ca099a34646fb..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxml2/libxml2-2.9.10-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'libxml2' -version = '2.9.10' - -homepage = 'http://xmlsoft.org/' - -description = """ -Libxml2 is the XML C parser and toolchain developed for the Gnome project -(but usable outside of the Gnome platform). -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'http://xmlsoft.org/sources/', - 'http://xmlsoft.org/sources/old/' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['aafee193ffb8fe0c82d4afef6ef91972cbaf5feea100edc2f262750611b4be1f'] - -builddependencies = [('binutils', '2.36.1')] - -dependencies = [ - ('XZ', '5.2.5'), - ('zlib', '1.2.11'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libxml2/libxml2-2.9.10-GCCcore-9.3.0.eb b/Golden_Repo/l/libxml2/libxml2-2.9.10-GCCcore-9.3.0.eb deleted file mode 100644 index a6be0eded8f7c808221183bd876f5bc723572dc1..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxml2/libxml2-2.9.10-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'libxml2' -version = '2.9.10' - -homepage = 'http://xmlsoft.org/' - -description = """ -Libxml2 is the XML C parser and toolchain developed for the Gnome project -(but usable outside of the Gnome platform). -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'http://xmlsoft.org/sources/', - 'http://xmlsoft.org/sources/old/' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['aafee193ffb8fe0c82d4afef6ef91972cbaf5feea100edc2f262750611b4be1f'] - -builddependencies = [('binutils', '2.34')] - -dependencies = [ - ('XZ', '5.2.5'), - ('zlib', '1.2.11'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libxslt/libxslt-1.1.34-GCCcore-10.3.0.eb b/Golden_Repo/l/libxslt/libxslt-1.1.34-GCCcore-10.3.0.eb deleted file mode 100644 index a52657348ed0d0164fa395a6567e0a557c632c29..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxslt/libxslt-1.1.34-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxslt' -version = '1.1.34' - -homepage = 'http://xmlsoft.org/' -description = """Libxslt is the XSLT C library developed for the GNOME project -(but usable outside of the Gnome platform). 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'http://xmlsoft.org/sources/', - 'http://xmlsoft.org/sources/old/' -] - -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libxml2', '2.9.10'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libxslt/libxslt-1.1.34-GCCcore-9.3.0.eb b/Golden_Repo/l/libxslt/libxslt-1.1.34-GCCcore-9.3.0.eb deleted file mode 100644 index 653e879d4b46835680e2f525153bf19c42d632b5..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxslt/libxslt-1.1.34-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxslt' -version = '1.1.34' - -homepage = 'http://xmlsoft.org/' -description = """Libxslt is the XSLT C library developed for the GNOME project -(but usable outside of the Gnome platform). -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [ - 'http://xmlsoft.org/sources/', - 'http://xmlsoft.org/sources/old/' -] - -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('libxml2', '2.9.10'), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libxsmm/libxsmm-1.16.1-GCC-9.3.0.eb b/Golden_Repo/l/libxsmm/libxsmm-1.16.1-GCC-9.3.0.eb deleted file mode 100644 index 89152e411f290f6a40395301a61b0b8e813ecd14..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxsmm/libxsmm-1.16.1-GCC-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxsmm' -version = '1.16.1' - -homepage = 'https://github.com/hfp/libxsmm' -description = """LIBXSMM is a library for small dense and small sparse matrix-matrix multiplications -targeting Intel Architecture (x86).""" - -site_contacts = "sc@fz-juelich.de" - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/hfp/libxsmm/archive/'] - -dependencies = [ - ('imkl', '2020.2.254', '', SYSTEM), -] - -# install both static and dynamic version -installopts = ['PREFIX=%(installdir)s', 'PREFIX=%(installdir)s STATIC=0'] - -skipsteps = ['configure'] -maxparallel = 1 - -runtest = "STATIC=0 test" - -sanity_check_paths = { - 'files': ['bin/libxsmm_gemm_generator', 'include/libxsmm.h', 'lib/libxsmm.a', 'lib/libxsmm.%s' % SHLIB_EXT], - 'dirs': ['share'] -} - -moduleclass = 'math' diff --git a/Golden_Repo/l/libxsmm/libxsmm-1.16.1-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/l/libxsmm/libxsmm-1.16.1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index e424c8064e7b6844838cacdf2ad6b43828fb3615..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libxsmm/libxsmm-1.16.1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libxsmm' -version = '1.16.1' - -homepage = 'https://github.com/hfp/libxsmm' -description = """LIBXSMM is a library for small dense and small sparse matrix-matrix multiplications -targeting Intel Architecture (x86).""" - -site_contacts = "sc@fz-juelich.de" - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/hfp/libxsmm/archive/'] - -dependencies = [ - ('imkl', '2020.2.254', '', SYSTEM), -] - -# install both static and dynamic version -installopts = ['PREFIX=%(installdir)s', 'PREFIX=%(installdir)s STATIC=0'] - -skipsteps = ['configure'] 
-maxparallel = 1 - -runtest = "STATIC=0 test" - -sanity_check_paths = { - 'files': ['bin/libxsmm_gemm_generator', 'include/libxsmm.h', 'lib/libxsmm.a', 'lib/libxsmm.%s' % SHLIB_EXT], - 'dirs': ['share'] -} - -moduleclass = 'math' diff --git a/Golden_Repo/l/libyaml/libyaml-0.2.5-GCCcore-10.3.0.eb b/Golden_Repo/l/libyaml/libyaml-0.2.5-GCCcore-10.3.0.eb deleted file mode 100644 index e48864bc8899063e4028d64727066e23c345449a..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libyaml/libyaml-0.2.5-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Nils Christian <nils.christian@uni.lu> -# License:: MIT/GPL -# $Id$ -## - -easyblock = 'ConfigureMake' - -name = 'libyaml' -version = '0.2.5' - -homepage = 'http://pyyaml.org/wiki/LibYAML' -description = """LibYAML is a YAML 1.1 parser and emitter written in C. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = ['yaml-%(version)s.tar.gz'] -source_urls = ['http://pyyaml.org/download/libyaml/'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), -] - -sanity_check_paths = { - 'files': ["include/yaml.h", "lib/libyaml.a", "lib/libyaml.%s" % SHLIB_EXT], - 'dirs': ["lib/pkgconfig"] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libyaml/libyaml-0.2.5-GCCcore-9.3.0.eb b/Golden_Repo/l/libyaml/libyaml-0.2.5-GCCcore-9.3.0.eb deleted file mode 100644 index 5a2846213c635f78ffe1e27b004841b7ab4967ab..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libyaml/libyaml-0.2.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Nils Christian <nils.christian@uni.lu> -# License:: MIT/GPL -# $Id$ -## - -easyblock = 'ConfigureMake' - -name = 'libyaml' -version = '0.2.5' - -homepage = 'http://pyyaml.org/wiki/LibYAML' -description = """LibYAML is a YAML 1.1 parser and emitter written in C. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = ['yaml-%(version)s.tar.gz'] -source_urls = ['http://pyyaml.org/download/libyaml/'] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), -] - -sanity_check_paths = { - 'files': ["include/yaml.h", "lib/libyaml.a", "lib/libyaml.%s" % SHLIB_EXT], - 'dirs': ["lib/pkgconfig"] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/libyuv/libyuv-20201024-GCCcore-10.3.0.eb b/Golden_Repo/l/libyuv/libyuv-20201024-GCCcore-10.3.0.eb deleted file mode 100644 index c0e7c9916b59e30e5915f4862ecb61590a1f485f..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libyuv/libyuv-20201024-GCCcore-10.3.0.eb +++ /dev/null @@ -1,55 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libyuv' -version = '20201024' - -homepage = 'https://chromium.googlesource.com/libyuv/libyuv/' -description = """ -libyuv for colorspace conversion. libyuv is Optimized for SSE2/SSSE3/AVX2 on x86/x64. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = [ - { - 'source_urls': ['https://chromium.googlesource.com/libyuv/libyuv/+archive/'], - 'filename': '%(name)s-19d71f6b351fe992ae34b114eebd872c383a6bdb.tar.gz', - 'download_filename': '19d71f6b351fe992ae34b114eebd872c383a6bdb.tar.gz', - }, { - 'filename': 'libyuv.pc.gz', - } -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM) -] - -dependencies = [ -] - -separate_build_dir = True -start_dir = './' - -maxparallel = 12 - -configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -postinstallcmds = [ - 'sed -ie "s#PREFIX_TEMPLATE#%(installdir)s#g" %(builddir)s/libyuv.pc', - 'cp %(builddir)s/libyuv.pc %(installdir)s' -] - -sanity_check_paths = { - 'files': ['lib/libyuv.a', 'include/libyuv.h'], - 'dirs': [], -} - -modextrapaths = { - 'PKG_CONFIG_PATH': '' -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libyuv/libyuv-20201024-GCCcore-9.3.0.eb b/Golden_Repo/l/libyuv/libyuv-20201024-GCCcore-9.3.0.eb deleted file mode 100644 index de42aaaccd80dd8908ae269ca8328d4db9eaa84f..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/libyuv/libyuv-20201024-GCCcore-9.3.0.eb +++ /dev/null @@ -1,56 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'libyuv' -version = '20201024' - -homepage = 'https://chromium.googlesource.com/libyuv/libyuv/' -description = """ -libyuv for colorspace conversion. libyuv is Optimized for SSE2/SSSE3/AVX2 on x86/x64. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [ - { - 'source_urls': ['https://chromium.googlesource.com/libyuv/libyuv/+archive/'], - 'filename': '%(name)s-19d71f6b351fe992ae34b114eebd872c383a6bdb.tar.gz', - 'download_filename': '19d71f6b351fe992ae34b114eebd872c383a6bdb.tar.gz', - }, { - 'filename': 'libyuv.pc.gz', - } -] -checksums = ['3796b5b96cc0c5987b6d4d6e52a726de6277623d900937f100a6b0cffd3adda8'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0') -] - -dependencies = [ -] - -separate_build_dir = True -start_dir = './' - -maxparallel = 12 - -configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -postinstallcmds = [ - 'sed -ie "s#PREFIX_TEMPLATE#%(installdir)s#g" %(builddir)s/libyuv.pc', - 'cp %(builddir)s/libyuv.pc %(installdir)s' -] - -sanity_check_paths = { - 'files': ['lib/libyuv.a', 'include/libyuv.h'], - 'dirs': [], -} - -modextrapaths = { - 'PKG_CONFIG_PATH': '' -} - -moduleclass = 'vis' diff --git a/Golden_Repo/l/libyuv/libyuv.pc.gz b/Golden_Repo/l/libyuv/libyuv.pc.gz deleted file mode 100644 index 401bc3c4d99707e1392a2d2cd279466259a38380..0000000000000000000000000000000000000000 Binary files a/Golden_Repo/l/libyuv/libyuv.pc.gz and /dev/null differ diff --git a/Golden_Repo/l/likwid/likwid-5.0.2-GCCcore-9.3.0.eb b/Golden_Repo/l/likwid/likwid-5.0.2-GCCcore-9.3.0.eb deleted file mode 100644 index 8b6ccf27a4f978f28d6c394dd070812b62058859..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/likwid/likwid-5.0.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'likwid' -version = '5.0.2' - -homepage = 'http://code.google.com/p/likwid/' - -description = """ -Likwid stands for Like I knew what I am doing. This project contributes easy -to use command line tools for Linux to support programmers in developing high -performance multi threaded programs. 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -# https://github.com/RRZE-HPC/likwid/archive/v5.0.2.tar.gz -source_urls = ['https://github.com/RRZE-HPC/likwid/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['0a1c8984e4b43ea8b99d09456ef05035eb934594af1669432117585c638a2da4'] - -builddependencies = [ - ('binutils', '2.34'), - ('Perl', '5.32.0'), -] - -skipsteps = ['configure'] - -buildopts = 'CC="$CC" CFLAGS="$CFLAGS -std=c99" PREFIX=%(installdir)s BUILDFREQ="" ACCESSMODE=perf_event ' -buildopts += 'CFG_FILE_PATH=%(installdir)s/etc/likwid.cfg TOPO_FILE_PATH=%(installdir)s/etc/likwid_topo.cfg' - -maxparallel = 1 - -installopts = 'PREFIX=%(installdir)s INSTALL_CHOWN="" BUILDFREQ="" ACCESSMODE=perf_event ' - -sanity_check_paths = { - 'files': ["bin/likwid-memsweeper", "bin/likwid-mpirun", "bin/likwid-perfctr", - "bin/likwid-perfscope", "bin/likwid-pin", "bin/likwid-powermeter", - "bin/likwid-topology", "lib/liblikwidpin.%s" % SHLIB_EXT, - "lib/liblikwid.%s" % SHLIB_EXT], - 'dirs': ["man/man1"] -} - -moduleclass = 'devel' diff --git a/Golden_Repo/l/likwid/likwid-5.1.0-GCCcore-9.3.0.eb b/Golden_Repo/l/likwid/likwid-5.1.0-GCCcore-9.3.0.eb deleted file mode 100644 index 78fa331f7dbc988e20917f023a60e9deb6a7b494..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/likwid/likwid-5.1.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'likwid' -version = '5.1.0' - -homepage = 'https://github.com/RRZE-HPC/likwid' - -description = """ -Likwid stands for Like I knew what I am doing. This project contributes easy -to use command line tools for Linux to support programmers in developing high -performance multi threaded programs. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/RRZE-HPC/likwid/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['5a180702a1656c6315b861a85031ab4cb090424aec42cbbb326b849e29f55571'] - -builddependencies = [ - ('binutils', '2.34'), - ('Perl', '5.32.0'), -] - -skipsteps = ['configure'] - -# include_GCC.mk is using ifort by default. 
-# Changing it to gfortran, to be consistent with GCCcore toolchain -prebuildopts = "sed -i 's#FC = ifort#FC = gfortran#g' make/include_GCC.mk && " -prebuildopts += "sed -i 's#FCFLAGS = -module ./ \# ifort#FCFLAGS = -J ./ -fsyntax-only \#gfortran#g' " -prebuildopts += " make/include_GCC.mk && " - -buildopts = 'CC="$CC" CFLAGS="$CFLAGS -std=c99" PREFIX=%(installdir)s BUILDFREQ="" ACCESSMODE=perf_event ' -buildopts += 'FORTRAN_INTERFACE=true ' -buildopts += 'CFG_FILE_PATH=%(installdir)s/etc/likwid.cfg TOPO_FILE_PATH=%(installdir)s/etc/likwid_topo.cfg' - -maxparallel = 1 - -installopts = 'PREFIX=%(installdir)s INSTALL_CHOWN="" BUILDFREQ="" ACCESSMODE=perf_event ' - -sanity_check_paths = { - 'files': ['bin/likwid-memsweeper', 'bin/likwid-mpirun', 'bin/likwid-perfctr', - 'bin/likwid-perfscope', 'bin/likwid-pin', 'bin/likwid-powermeter', - 'bin/likwid-topology', 'lib/liblikwidpin.%s' % SHLIB_EXT, - 'lib/liblikwid.%s' % SHLIB_EXT], - 'dirs': ['man/man1'] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/l/likwid/likwid-5.2.0-GCCcore-10.3.0.eb b/Golden_Repo/l/likwid/likwid-5.2.0-GCCcore-10.3.0.eb deleted file mode 100644 index 9b61fb7811dbda9c46468158f67957d3600a9455..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/likwid/likwid-5.2.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'likwid' -version = '5.2.0' - -homepage = 'https://github.com/RRZE-HPC/likwid' - -description = """ -Likwid stands for Like I knew what I am doing. This project contributes easy -to use command line tools for Linux to support programmers in developing high -performance multi threaded programs. -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/RRZE-HPC/likwid/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['aa6dccacfca59e52d8f3be187ffcf292b2a2fa1f51a81bf8912b9d48e5a257e0'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Perl', '5.32.0'), -] - -skipsteps = ['configure'] - -# include_GCC.mk is using ifort by default. 
-# Changing it to gfortran, to be consistent with GCCcore toolchain -prebuildopts = "sed -i 's#FC = ifort#FC = gfortran#g' make/include_GCC.mk && " -prebuildopts += "sed -i 's#FCFLAGS = -module ./ \# ifort#FCFLAGS = -J ./ -fsyntax-only \#gfortran#g' " -prebuildopts += " make/include_GCC.mk && " - -buildopts = 'CC="$CC" CFLAGS="$CFLAGS -std=c99" PREFIX=%(installdir)s BUILDFREQ="" ACCESSMODE=perf_event ' -buildopts += 'FORTRAN_INTERFACE=true ' -buildopts += 'CFG_FILE_PATH=%(installdir)s/etc/likwid.cfg TOPO_FILE_PATH=%(installdir)s/etc/likwid_topo.cfg' - -maxparallel = 1 - -installopts = 'PREFIX=%(installdir)s INSTALL_CHOWN="" BUILDFREQ="" ACCESSMODE=perf_event ' - -sanity_check_paths = { - 'files': ['bin/likwid-memsweeper', 'bin/likwid-mpirun', 'bin/likwid-perfctr', - 'bin/likwid-perfscope', 'bin/likwid-pin', 'bin/likwid-powermeter', - 'bin/likwid-topology', 'lib/liblikwidpin.%s' % SHLIB_EXT, - 'lib/liblikwid.%s' % SHLIB_EXT], - 'dirs': ['man/man1'] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/l/lz4/lz4-1.9.3-GCCcore-10.3.0.eb b/Golden_Repo/l/lz4/lz4-1.9.3-GCCcore-10.3.0.eb deleted file mode 100644 index 3d6b2aa1e98bb87e6102cb1332b3970185d0fdfe..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/lz4/lz4-1.9.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'lz4' -version = '1.9.3' - -homepage = 'https://lz4.github.io/lz4/' -description = """LZ4 is lossless compression algorithm, providing compression speed at 400 MB/s per core. - It features an extremely fast decoder, with speed in multiple GB/s per core.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -github_account = '%(name)s' -source_urls = [GITHUB_SOURCE] -sources = ['v%(version)s.tar.gz'] -checksums = ['030644df4611007ff7dc962d981f390361e6c97a34e5cbc393ddfbe019ffe2c1'] - -builddependencies = [('binutils', '2.36.1')] - -skipsteps = ['configure'] - -installopts = "PREFIX=%(installdir)s" - -runtest = 'check' - -sanity_check_paths = { - 'files': ["bin/lz4", "lib/liblz4.%s" % SHLIB_EXT, "include/lz4.h"], - 'dirs': ["lib/pkgconfig"] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/lz4/lz4-3.1.0-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/l/lz4/lz4-3.1.0-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 633abd566b9eb2665b6fbb102070dd7870897bb9..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/lz4/lz4-3.1.0-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'lz4' -version = '3.1.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/lz4' -description = """ -LZ4 is lossless compression algorithm, providing compression speed > 500 MB/s per core, -scalable with multi-cores CPU. It features an extremely fast decoder, with speed in -multiple GB/s per core, typically reaching RAM speed limits on multi-core systems. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [PYPI_SOURCE] - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('Python', '3.8.5'), -] - -maxparallel = 12 - -start_dir = '%(builddir)s/%(name)s-%(version)s/' - -options = {'modulename': 'lz4'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/l/lz4/lz4-3.1.0-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/l/lz4/lz4-3.1.0-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index c9c8b0cfc3ab5528eebff53b059b7641e238d20c..0000000000000000000000000000000000000000 --- a/Golden_Repo/l/lz4/lz4-3.1.0-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'lz4' -version = '3.1.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/lz4' -description = """ -LZ4 is lossless compression algorithm, providing compression speed > 500 MB/s per core, -scalable with multi-cores CPU. It features an extremely fast decoder, with speed in -multiple GB/s per core, typically reaching RAM speed limits on multi-core systems. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [PYPI_SOURCE] - -dependencies = [ - ('Python', '3.8.5'), -] - -maxparallel = 12 - -start_dir = '%(builddir)s/%(name)s-%(version)s/' - -options = {'modulename': 'lz4'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/m/M4/M4-1.4.18-GCCcore-10.3.0.eb b/Golden_Repo/m/M4/M4-1.4.18-GCCcore-10.3.0.eb deleted file mode 100644 index 4eedb56b0119f3fa9ca9dd23224f58d3dd23104b..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/M4/M4-1.4.18-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'M4' -version = '1.4.18' - -homepage = 'https://www.gnu.org/software/m4/m4.html' -description = """GNU M4 is an implementation of the traditional Unix macro processor. It is mostly SVR4 compatible - although it has some extensions (for example, handling more than 9 positional parameters to macros). 
- GNU M4 also has built-in functions for including files, running shell commands, doing arithmetic, etc.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -patches = ['M4-1.4.18_glibc_2.28.patch'] -checksums = [ - 'ab2633921a5cd38e48797bf5521ad259bdc4b979078034a3b790d7fec5493fab', # m4-1.4.18.tar.gz - # M4-1.4.18_glibc_2.28.patch - 'a613c18f00b1a3caa46ae4b8b849a0f4f71095ad860f4fcd6c6bb4ae211681fa', -] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.36.1', '', SYSTEM)] - -# '-fgnu89-inline' is required to avoid linking errors with older glibc's, -# see https://github.com/easybuilders/easybuild-easyconfigs/issues/529 -configopts = "--enable-c++ CPPFLAGS=-fgnu89-inline" - -sanity_check_paths = { - 'files': ['bin/m4'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/m/M4/M4-1.4.18-GCCcore-9.3.0.eb b/Golden_Repo/m/M4/M4-1.4.18-GCCcore-9.3.0.eb deleted file mode 100644 index 4dd2e09cef003c70fa54edefbcf3e251971c7018..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/M4/M4-1.4.18-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'M4' -version = '1.4.18' - -homepage = 'http://www.gnu.org/software/m4/m4.html' -description = """GNU M4 is an implementation of the traditional Unix macro processor. It is mostly SVR4 compatible - although it has some extensions (for example, handling more than 9 positional parameters to macros). - GNU M4 also has built-in functions for including files, running shell commands, doing arithmetic, etc. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -patches = ['m4-1.4.18-glibc-change-work-around.patch'] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.34', '', SYSTEM)] - -configopts = "--enable-cxx" - -sanity_check_paths = { - 'files': ["bin/m4"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/m/M4/M4-1.4.18.eb b/Golden_Repo/m/M4/M4-1.4.18.eb deleted file mode 100644 index a03bcc7ab52b62854890714b417e31952a178a92..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/M4/M4-1.4.18.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'M4' -version = '1.4.18' - -homepage = 'http://www.gnu.org/software/m4/m4.html' -description = """GNU M4 is an implementation of the traditional Unix macro processor. - It is mostly SVR4 compatible although it has some extensions - (for example, handling more than 9 positional parameters to macros). - GNU M4 also has built-in functions for including files, running shell commands, doing arithmetic, etc. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -patches = ['m4-1.4.18-glibc-change-work-around.patch'] - -configopts = "--enable-cxx" - -sanity_check_paths = { - 'files': ["bin/m4"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/m/M4/M4-1.4.18_glibc_2.28.patch b/Golden_Repo/m/M4/M4-1.4.18_glibc_2.28.patch deleted file mode 100644 index 0dcef3af42a3b17442cfb51ebd84acbc455562d3..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/M4/M4-1.4.18_glibc_2.28.patch +++ /dev/null @@ -1,56 +0,0 @@ -fix problems occuring from changes in glibc 2.28 -https://github.com/coreutils/gnulib/commit/4af4a4a71827c0bc5e0ec67af23edef4f15cee8e - -diff -Naur m4-1.4.18_old/lib/freadahead.c m4-1.4.18/lib/freadahead.c ---- m4-1.4.18_old/lib/freadahead.c 2019-01-08 15:26:47.835617107 +0100 -+++ m4-1.4.18/lib/freadahead.c 2019-01-08 15:29:50.827211620 +0100 -@@ -25,7 +25,7 @@ - size_t - freadahead (FILE *fp) - { --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - if (fp->_IO_write_ptr > fp->_IO_write_base) - return 0; - return (fp->_IO_read_end - fp->_IO_read_ptr) - -diff -Naur m4-1.4.18_old/lib/fseeko.c m4-1.4.18/lib/fseeko.c ---- m4-1.4.18_old/lib/fseeko.c 2019-01-08 15:26:47.831617117 +0100 -+++ m4-1.4.18/lib/fseeko.c 2019-01-08 15:29:50.835211605 +0100 -@@ -47,7 +47,7 @@ - #endif - - /* These tests are based on fpurge.c. */ --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - if (fp->_IO_read_end == fp->_IO_read_ptr - && fp->_IO_write_ptr == fp->_IO_write_base - && fp->_IO_save_base == NULL) -@@ -123,7 +123,7 @@ - return -1; - } - --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - fp->_flags &= ~_IO_EOF_SEEN; - fp->_offset = pos; - #elif defined __sferror || defined __DragonFly__ || defined __ANDROID__ - -diff -Naur m4-1.4.18_old/lib/stdio-impl.h m4-1.4.18/lib/stdio-impl.h ---- m4-1.4.18_old/lib/stdio-impl.h 2019-01-08 15:26:47.831617117 +0100 -+++ m4-1.4.18/lib/stdio-impl.h 2019-01-08 15:29:50.835211605 +0100 -@@ -21,6 +21,13 @@ - - /* BSD stdio derived implementations. */ - -+/* Glibc 2.28 made _IO_IN_BACKUP private. For now, work around this -+ problem by defining it ourselves. FIXME: Do not rely on glibc -+ internals. */ -+#if !defined _IO_IN_BACKUP && defined _IO_EOF_SEEN -+# define _IO_IN_BACKUP 0x100 -+#endif -+ - #if defined __NetBSD__ /* NetBSD */ - /* Get __NetBSD_Version__. 
*/ - # include <sys/param.h> - diff --git a/Golden_Repo/m/M4/m4-1.4.18-glibc-change-work-around.patch b/Golden_Repo/m/M4/m4-1.4.18-glibc-change-work-around.patch deleted file mode 100644 index 582ae9b536c33d3f2d2727b37b1e3a832bc0ef42..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/M4/m4-1.4.18-glibc-change-work-around.patch +++ /dev/null @@ -1,115 +0,0 @@ -diff -up m4-1.4.18/lib/fflush.c.orig m4-1.4.18/lib/fflush.c ---- m4-1.4.18/lib/fflush.c.orig 2018-05-02 12:35:59.536851666 +0200 -+++ m4-1.4.18/lib/fflush.c 2018-05-02 12:37:02.768958606 +0200 -@@ -33,7 +33,7 @@ - #undef fflush - - --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - - /* Clear the stream's ungetc buffer, preserving the value of ftello (fp). */ - static void -@@ -72,7 +72,7 @@ clear_ungetc_buffer (FILE *fp) - - #endif - --#if ! (defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */) -+#if ! (defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */) - - # if (defined __sferror || defined __DragonFly__ || defined __ANDROID__) && defined __SNPT - /* FreeBSD, NetBSD, OpenBSD, DragonFly, Mac OS X, Cygwin, Android */ -@@ -148,7 +148,7 @@ rpl_fflush (FILE *stream) - if (stream == NULL || ! freading (stream)) - return fflush (stream); - --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - - clear_ungetc_buffer_preserving_position (stream); - -diff -up m4-1.4.18/lib/fpending.c.orig m4-1.4.18/lib/fpending.c ---- m4-1.4.18/lib/fpending.c.orig 2018-05-02 12:35:32.305806774 +0200 -+++ m4-1.4.18/lib/fpending.c 2018-05-02 12:35:44.944827347 +0200 -@@ -32,7 +32,7 @@ __fpending (FILE *fp) - /* Most systems provide FILE as a struct and the necessary bitmask in - <stdio.h>, because they need it for implementing getc() and putc() as - fast macros. */ --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - return fp->_IO_write_ptr - fp->_IO_write_base; - #elif defined __sferror || defined __DragonFly__ || defined __ANDROID__ - /* FreeBSD, NetBSD, OpenBSD, DragonFly, Mac OS X, Cygwin, Android */ -diff -up m4-1.4.18/lib/fpurge.c.orig m4-1.4.18/lib/fpurge.c ---- m4-1.4.18/lib/fpurge.c.orig 2018-05-02 12:38:13.586078669 +0200 -+++ m4-1.4.18/lib/fpurge.c 2018-05-02 12:38:38.785121867 +0200 -@@ -62,7 +62,7 @@ fpurge (FILE *fp) - /* Most systems provide FILE as a struct and the necessary bitmask in - <stdio.h>, because they need it for implementing getc() and putc() as - fast macros. */ --# if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+# if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - fp->_IO_read_end = fp->_IO_read_ptr; - fp->_IO_write_ptr = fp->_IO_write_base; - /* Avoid memory leak when there is an active ungetc buffer. 
*/ -diff -up m4-1.4.18/lib/freadahead.c.orig m4-1.4.18/lib/freadahead.c ---- m4-1.4.18/lib/freadahead.c.orig 2016-12-31 14:54:41.000000000 +0100 -+++ m4-1.4.18/lib/freadahead.c 2018-05-02 11:43:19.570336724 +0200 -@@ -25,7 +25,7 @@ - size_t - freadahead (FILE *fp) - { --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - if (fp->_IO_write_ptr > fp->_IO_write_base) - return 0; - return (fp->_IO_read_end - fp->_IO_read_ptr) -diff -up m4-1.4.18/lib/freading.c.orig m4-1.4.18/lib/freading.c ---- m4-1.4.18/lib/freading.c.orig 2018-05-02 12:37:33.970011368 +0200 -+++ m4-1.4.18/lib/freading.c 2018-05-02 12:37:59.393054359 +0200 -@@ -31,7 +31,7 @@ freading (FILE *fp) - /* Most systems provide FILE as a struct and the necessary bitmask in - <stdio.h>, because they need it for implementing getc() and putc() as - fast macros. */ --# if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+# if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - return ((fp->_flags & _IO_NO_WRITES) != 0 - || ((fp->_flags & (_IO_NO_READS | _IO_CURRENTLY_PUTTING)) == 0 - && fp->_IO_read_base != NULL)); -diff -up m4-1.4.18/lib/fseeko.c.orig m4-1.4.18/lib/fseeko.c ---- m4-1.4.18/lib/fseeko.c.orig 2018-05-02 11:44:17.947460233 +0200 -+++ m4-1.4.18/lib/fseeko.c 2018-05-02 12:39:49.537216897 +0200 -@@ -47,7 +47,7 @@ fseeko (FILE *fp, off_t offset, int when - #endif - - /* These tests are based on fpurge.c. */ --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - if (fp->_IO_read_end == fp->_IO_read_ptr - && fp->_IO_write_ptr == fp->_IO_write_base - && fp->_IO_save_base == NULL) -@@ -123,7 +123,7 @@ fseeko (FILE *fp, off_t offset, int when - return -1; - } - --#if defined _IO_ftrylockfile || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ -+#if defined _IO_EOF_SEEN || __GNU_LIBRARY__ == 1 /* GNU libc, BeOS, Haiku, Linux libc5 */ - fp->_flags &= ~_IO_EOF_SEEN; - fp->_offset = pos; - #elif defined __sferror || defined __DragonFly__ || defined __ANDROID__ -diff -up m4-1.4.18/lib/stdio-impl.h.orig m4-1.4.18/lib/stdio-impl.h ---- m4-1.4.18/lib/stdio-impl.h.orig 2016-12-31 14:54:42.000000000 +0100 -+++ m4-1.4.18/lib/stdio-impl.h 2018-05-02 11:43:19.570336724 +0200 -@@ -18,6 +18,12 @@ - the same implementation of stdio extension API, except that some fields - have different naming conventions, or their access requires some casts. */ - -+/* Glibc 2.28 made _IO_IN_BACKUP private. For now, work around this -+ problem by defining it ourselves. FIXME: Do not rely on glibc -+ internals. */ -+#if !defined _IO_IN_BACKUP && defined _IO_EOF_SEEN -+# define _IO_IN_BACKUP 0x100 -+#endif - - /* BSD stdio derived implementations. 
*/ - diff --git a/Golden_Repo/m/MED/MED-4.0.0-gpsmpi-2020.eb b/Golden_Repo/m/MED/MED-4.0.0-gpsmpi-2020.eb deleted file mode 100644 index eb7247f3729154e1ed3782e0762818a94e7b075a..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MED/MED-4.0.0-gpsmpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -# easyconfig file for Med File Library -# author: Metin Cakircali (Juelich Supercomputing Centre) -easyblock = 'ConfigureMake' - -name = 'MED' -version = '4.0.0' - -homepage = 'http://salome-platform.org/' -description = """Initially defined by EDF R&D, -this format has been defined and maintained -through a MED working group comprising members of -EDF R&D and CEA (the Code Saturne team being represented). -""" - -site_contacts = 's.koh@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -sources = [{ - 'source_urls': ['http://files.salome-platform.org/Salome/other/'], - 'filename': 'med-%(version)s.tar.gz' -}] - -dependencies = [ - ('Python', '3.8.5'), - ('SWIG', '4.0.2', '-Python-%(pyver)s'), - ('HDF5', '1.10.6'), -] - -# better to configure these dependents explicitly -configopts = ['--with-f90 --with-hdf5=$EBROOTHDF5 --with-swig=$EBROOTSWIG'] - -moduleclass = 'lib' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-GCC-10.3.0.eb b/Golden_Repo/m/METIS/METIS-5.1.0-GCC-10.3.0.eb deleted file mode 100644 index c5814101c5fceabe71d6b40ccb1d74a01c4c7317..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-GCC-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'METIS' -version = '5.1.0' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-GCC-9.3.0-RTW64-IDX32.eb b/Golden_Repo/m/METIS/METIS-5.1.0-GCC-9.3.0-RTW64-IDX32.eb deleted file mode 100644 index e56df6795456bc991eced6633aa90c5a89e945a5..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-GCC-9.3.0-RTW64-IDX32.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'METIS' -version = '5.1.0' -versionsuffix = '-RTW64-IDX32' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -# We use 32bit for indices and 64bit for content -patches = ['METIS-5.1.0-use-doubles.patch'] - -builddependencies = [ - ('CMake', '3.18.0') -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-GCC-9.3.0.eb b/Golden_Repo/m/METIS/METIS-5.1.0-GCC-9.3.0.eb deleted file mode 100644 index 0035cdc5960a6f8ea17fad0fbf83484277ed3fb0..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-GCC-9.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'METIS' -version = '5.1.0' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-GCCcore-10.3.0-IDX64.eb b/Golden_Repo/m/METIS/METIS-5.1.0-GCCcore-10.3.0-IDX64.eb deleted file mode 100644 index e2fa5086c4e5dac0f3c554460d5ef80d560c3cf0..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-GCCcore-10.3.0-IDX64.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'METIS' -version = '5.1.0' -versionsuffix = '-IDX64' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -patches = ['METIS-5.1.0-IDX64.patch'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM) -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-GCCcore-9.3.0-IDX64.eb b/Golden_Repo/m/METIS/METIS-5.1.0-GCCcore-9.3.0-IDX64.eb deleted file mode 100644 index 6f09fd0acd3edc23729604f2686b74268ddbb138..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-GCCcore-9.3.0-IDX64.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'METIS' -version = '5.1.0' -versionsuffix = '-IDX64' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. 
The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -patches = ['METIS-5.1.0-IDX64.patch'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0') -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-IDX64.patch b/Golden_Repo/m/METIS/METIS-5.1.0-IDX64.patch deleted file mode 100644 index 70816692329c2a4083b45f4abb2bc782b31cacb3..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-IDX64.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- include/metis_orig.h 2021-01-20 15:05:51.807203000 +0100 -+++ include/metis.h 2021-01-20 15:07:06.391591000 +0100 -@@ -30,7 +30,7 @@ - GCC does provides these definitions in stdint.h, but it may require some - modifications on other architectures. - --------------------------------------------------------------------------*/ --#define IDXTYPEWIDTH 32 -+#define IDXTYPEWIDTH 64 - - - /*-------------------------------------------------------------------------- diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-iccifort-2020.2.254-GCC-9.3.0-RTW64-IDX32.eb b/Golden_Repo/m/METIS/METIS-5.1.0-iccifort-2020.2.254-GCC-9.3.0-RTW64-IDX32.eb deleted file mode 100644 index fa7af0f4cf07f82addb7d9038fa7a61e12a834f4..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-iccifort-2020.2.254-GCC-9.3.0-RTW64-IDX32.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'METIS' -version = '5.1.0' -versionsuffix = '-RTW64-IDX32' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -# We use 32bit for indices and 64bit for content -patches = ['METIS-5.1.0-use-doubles.patch'] - -builddependencies = [ - ('CMake', '3.18.0') -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/m/METIS/METIS-5.1.0-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index ffc040f18407395b1ae91dea5d344693dd45ade7..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'METIS' -version = '5.1.0' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/METIS/METIS-5.1.0-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/m/METIS/METIS-5.1.0-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 3b70dd555217df1c32ab792d1254c40c92743b7a..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/METIS/METIS-5.1.0-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'METIS' -version = '5.1.0' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' -description = """METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, -and producing fill reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the -multilevel recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD', -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -configopts = ['', 'shared=1'] - -moduleclass = 'math' diff --git a/Golden_Repo/m/MPFR/MPFR-4.1.0-GCCcore-10.3.0.eb b/Golden_Repo/m/MPFR/MPFR-4.1.0-GCCcore-10.3.0.eb deleted file mode 100644 index 4a0253ef5901bd09ac066dfbe4add691819f1544..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MPFR/MPFR-4.1.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'MPFR' -version = '4.1.0' - -homepage = 'http://www.mpfr.org' -description = 'The MPFR library is a C library for multiple-precision floating-point computations with correct rounding' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_BZ2] -source_urls = ['http://www.%(namelower)s.org/%(namelower)s-%(version)s/'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('GMP', '6.2.0'), -] - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libmpfr.so', 'include/%(namelower)s.h'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/m/MPFR/MPFR-4.1.0-GCCcore-9.3.0.eb b/Golden_Repo/m/MPFR/MPFR-4.1.0-GCCcore-9.3.0.eb deleted file mode 100644 index 537ac74218b0b16ef94ada6d320637c839717eec..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MPFR/MPFR-4.1.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'MPFR' -version = '4.1.0' - -homepage = 'http://www.mpfr.org' -description = 'The MPFR library is a C library for multiple-precision floating-point computations with correct rounding' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_BZ2] -source_urls = ['http://www.%(namelower)s.org/%(namelower)s-%(version)s/'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('GMP', '6.2.0'), -] - -runtest = 'check' - -sanity_check_paths = { - 'files': ['lib/libmpfr.so', 
'include/%(namelower)s.h'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/m/MUMPS/MUMPS-5.1.2_examples_mkl.patch b/Golden_Repo/m/MUMPS/MUMPS-5.1.2_examples_mkl.patch deleted file mode 100644 index b2c52f828121628497cba5bed2915f1d305cc11f..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUMPS/MUMPS-5.1.2_examples_mkl.patch +++ /dev/null @@ -1,223 +0,0 @@ ---- MUMPS_5.1.2/README 2017-10-02 09:37:01.000000000 +0200 -+++ MUMPS_5.1.2_ok/README 2018-04-26 16:29:07.000000000 +0200 -@@ -59,3 +59,4 @@ - SCILAB contains a SCILAB interface to the sequential version of MUMPS - (only tested with scilab version 4) - -+Nothing, just make diff happy ---- MUMPS_5.1.2/examples/Makefile_built 2018-04-26 16:25:57.432924000 +0200 -+++ MUMPS_5.1.2_ok/examples/Makefile_built 2018-04-26 16:00:42.000000000 +0200 -@@ -0,0 +1,105 @@ -+# -+# This file is part of MUMPS 5.1.2, released -+# on Mon Oct 2 07:37:01 UTC 2017 -+# -+ -+CC = mpicc -+FC = mpif77 -+FL = mpif77 -+ -+LIBBLAS = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -+SCALAP=-lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LIBPAR = $(SCALAP) $(LIBBLAS) -+ -+#Preprocessor defs for calling Fortran from C (-DAdd_ or -DAdd__ or -DUPPER) -+CDEFS = -DAdd_ -+ -+#Begin Optimized options -+OPTF = -O -nofor_main -qopenmp -Dintel_ -DALLOW_NON_INIT -+OPTL = -O -nofor_main -qopenmp -+OPTC = -O -qopenmp -+#End Optimized options -+ -+default: d -+ -+.PHONY: default all s d c z multi clean -+.SECONDEXPANSION: -+ -+all: c z s d multi -+ -+c: csimpletest -+z: zsimpletest -+s: ssimpletest -+d: dsimpletest c_example -+multi: multiple_arithmetics_example -+ -+SCOTCHDIR=$(EBROOTSCOTCH) -+LMETISDIR=$(EBROOTPARMETIS) -+LMETIS=-L$(EBROOTPARMETIS)/lib -lparmetis -lmetis -+LSCOTCH=-L$(EBROOTSCOTCH)/lib -lptesmumps -lptscotch -lptscotcherr -lesmumps -lscotch -lscotcherr -+LPORD=-L$(MUMPS_LIB) -lpord -+ -+LIBMUMPS_COMMON = -L$(MUMPS_LIB) -lmumps_common -+ -+LORDERINGS=$(LMETIS) $(LPORD) $(LSCOTCH) -+ -+LIBSMUMPS = -L$(MUMPS_LIB) -lsmumps $(LIBMUMPS_COMMON) -+ -+ssimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) ssimpletest.o $(LIBSMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBDMUMPS = -L$(MUMPS_LIB) -ldmumps $(LIBMUMPS_COMMON) -+ -+dsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) dsimpletest.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBCMUMPS = -L$(MUMPS_LIB) -lcmumps $(LIBMUMPS_COMMON) -+ -+csimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) csimpletest.o $(LIBCMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBZMUMPS = -L$(MUMPS_LIB) -lzmumps $(LIBMUMPS_COMMON) -+ -+zsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) zsimpletest.o $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+c_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+multiple_arithmetics_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBSMUMPS) $(LIBDMUMPS) $(LIBCMUMPS) $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+.SUFFIXES: .c .F .o -+.F.o: -+ $(FC) $(OPTF) $(INCS) -I. -I$(MUMPS_INCLUDE) -c $*.F -o $*.o -+.c.o: -+ $(CC) $(OPTC) $(INCS) $(CDEFS) -I. 
-I$(MUMPS_INCLUDE) -c $*.c -o $*.o -+ -+ -+$(libdir)/libsmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(libdir)/libdmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(libdir)/libcmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(libdir)/libzmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(LIBMUMPS_COMMON): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+clean: -+ $(RM) *.o [sdcz]simpletest c_example multiple_arithmetics_example ---- MUMPS_5.1.2/examples/Makefile_built_gpsmkl 1970-01-01 01:00:00.000000000 +0100 -+++ MUMPS_5.1.2_ok/examples/Makefile_built_gpsmkl 2019-04-16 11:39:03.520513000 +0200 -@@ -0,0 +1,105 @@ -+# -+# This file is part of MUMPS 5.1.2, released -+# on Mon Oct 2 07:37:01 UTC 2017 -+# -+ -+CC = mpicc -+FC = mpif77 -+FL = mpif77 -+ -+LIBBLAS = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl -+SCALAP=-lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LIBPAR = $(SCALAP) $(LIBBLAS) -+ -+#Preprocessor defs for calling Fortran from C (-DAdd_ or -DAdd__ or -DUPPER) -+CDEFS = -DAdd_ -+ -+#Begin Optimized options -+OPTF = -O -fopenmp -+OPTL = -O -fopenmp -+OPTC = -O -fopenmp -+#End Optimized options -+ -+default: d -+ -+.PHONY: default all s d c z multi clean -+.SECONDEXPANSION: -+ -+all: c z s d multi -+ -+c: csimpletest -+z: zsimpletest -+s: ssimpletest -+d: dsimpletest c_example -+multi: multiple_arithmetics_example -+ -+SCOTCHDIR=$(EBROOTSCOTCH) -+LMETISDIR=$(EBROOTPARMETIS) -+LMETIS=-L$(EBROOTPARMETIS)/lib -lparmetis -lmetis -+LSCOTCH=-L$(EBROOTSCOTCH)/lib -lptesmumps -lptscotch -lptscotcherr -lesmumps -lscotch -lscotcherr -+LPORD=-L$(MUMPS_LIB) -lpord -+ -+LIBMUMPS_COMMON = -L$(MUMPS_LIB) -lmumps_common -+ -+LORDERINGS=$(LMETIS) $(LPORD) $(LSCOTCH) -+ -+LIBSMUMPS = -L$(MUMPS_LIB) -lsmumps $(LIBMUMPS_COMMON) -+ -+ssimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) ssimpletest.o $(LIBSMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBDMUMPS = -L$(MUMPS_LIB) -ldmumps $(LIBMUMPS_COMMON) -+ -+dsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) dsimpletest.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBCMUMPS = -L$(MUMPS_LIB) -lcmumps $(LIBMUMPS_COMMON) -+ -+csimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) csimpletest.o $(LIBCMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBZMUMPS = -L$(MUMPS_LIB) -lzmumps $(LIBMUMPS_COMMON) -+ -+zsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) zsimpletest.o $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+c_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+multiple_arithmetics_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBSMUMPS) $(LIBDMUMPS) $(LIBCMUMPS) $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+.SUFFIXES: .c .F .o -+.F.o: -+ $(FC) $(OPTF) $(INCS) -I. -I$(MUMPS_INCLUDE) -c $*.F -o $*.o -+.c.o: -+ $(CC) $(OPTC) $(INCS) $(CDEFS) -I. 
-I$(MUMPS_INCLUDE) -c $*.c -o $*.o -+ -+ -+$(libdir)/libsmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(libdir)/libdmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(libdir)/libcmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(libdir)/libzmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(LIBMUMPS_COMMON): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+clean: -+ $(RM) *.o [sdcz]simpletest c_example multiple_arithmetics_example diff --git a/Golden_Repo/m/MUMPS/MUMPS-5.2.1_shared-pord.patch b/Golden_Repo/m/MUMPS/MUMPS-5.2.1_shared-pord.patch deleted file mode 100644 index 10edc5b7eb89aeb64f824c3add960bf8f7ceaecb..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUMPS/MUMPS-5.2.1_shared-pord.patch +++ /dev/null @@ -1,80 +0,0 @@ -source: https://src.fedoraproject.org/rpms/MUMPS/blob/master/f/MUMPS-shared-pord.patch -author: Antonio Trande (sagitter@fedoraproject.org) -Create static and shared versions of the PORD library. - -Index: mumps/PORD/lib/Makefile -=================================================================== ---- mumps.orig/PORD/lib/Makefile -+++ mumps/PORD/lib/Makefile -@@ -9,7 +9,7 @@ - - INCLUDES = -I../include - --COPTIONS = $(INCLUDES) $(CFLAGS) $(OPTFLAGS) -+COPTIONS = $(INCLUDES) $(CFLAGS) $(OPTFLAGS) -fPIC - - OBJS = graph.o gbipart.o gbisect.o ddcreate.o ddbisect.o nestdiss.o \ - multisector.o gelim.o bucket.o tree.o \ -@@ -24,12 +24,16 @@ - .c.o: - $(CC) $(COPTIONS) -c $*.c $(OUTC)$*.o - --libpord$(LIBEXT):$(OBJS) -- $(AR)$@ $(OBJS) -+libpord$(PLAT).a:$(OBJS) -+ $(AR) $@ $(OBJS) - $(RANLIB) $@ - -+libpord$(PLAT).so: $(OBJS) -+ $(CC) -shared $(OBJS) -Wl,-soname,libpord$(PLAT)-$(SONAME_VERSION).so -o libpord$(PLAT)-$(SONAME_VERSION).so $(OPTL) -Wl,-z,defs -+ ln -fs libpord$(PLAT)-$(SONAME_VERSION).so $@ -+ - clean: - rm -f *.o - - realclean: -- rm -f *.o libpord.a -+ rm -f *.o libpord*.a *.so -Index: mumps/Makefile -=================================================================== ---- mumps.orig/Makefile -+++ mumps/Makefile -@@ -54,7 +54,7 @@ - multi_example: s d c z - (cd examples ; $(MAKE) multi) - --requiredobj: Makefile.inc $(LIBSEQNEEDED) $(libdir)/libpord$(PLAT)$(LIBEXT) -+requiredobj: Makefile.inc $(LIBSEQNEEDED) $(libdir)/libpord$(PLAT).a $(libdir)/libpord$(PLAT).so - - # dummy MPI library (sequential version) - -@@ -62,19 +62,26 @@ - (cd libseq; $(MAKE)) - - # Build the libpord.a library and copy it into $(topdir)/lib --$(libdir)/libpord$(PLAT)$(LIBEXT): -+$(libdir)/libpord$(PLAT).a: - if [ "$(LPORDDIR)" != "" ] ; then \ - cd $(LPORDDIR); \ - $(MAKE) CC="$(CC)" CFLAGS="$(OPTC)" AR="$(AR)" RANLIB="$(RANLIB)" OUTC="$(OUTC)" LIBEXT=$(LIBEXT); \ - fi; - if [ "$(LPORDDIR)" != "" ] ; then \ -- cp $(LPORDDIR)/libpord$(LIBEXT) $@; \ -+ cp $(LPORDDIR)/libpord$(PLAT).a $@; \ - fi; - -+$(libdir)/libpord$(PLAT).so: -+ if [ "$(LPORDDIR)" != "" ] ; then \ -+ cd $(LPORDDIR); make CC="$(CC)" CFLAGS="$(OPTC)" AR="$(AR)" ARFUNCT= RANLIB="$(RANLIB)" libpord$(PLAT).so; fi; -+ if [ "$(LPORDDIR)" != "" ] ; then \ -+ cp -a $(LPORDDIR)/libpord*.so lib/; fi; -+ -+ - clean: - (cd src; $(MAKE) clean) - (cd examples; $(MAKE) clean) -- (cd $(libdir); $(RM) *$(PLAT)$(LIBEXT)) -+ (cd $(libdir); $(RM) *$(PLAT).a *$(PLAT).so) - (cd libseq; $(MAKE) clean) - if [ "$(LPORDDIR)" != "" ] ; then \ - cd $(LPORDDIR); $(MAKE) realclean; \ diff --git 
a/Golden_Repo/m/MUMPS/MUMPS-5.3.4-gpsmkl-2020.eb b/Golden_Repo/m/MUMPS/MUMPS-5.3.4-gpsmkl-2020.eb deleted file mode 100644 index bfd8d4528098aa1691b22872dd04f658284e2e69..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUMPS/MUMPS-5.3.4-gpsmkl-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -name = 'MUMPS' -version = '5.3.4' - -homepage = 'http://graal.ens-lyon.fr/MUMPS/' -description = """MUMPS, a parallel sparse direct solver, has been installed as a module in $EBROOTMUMPS -It contains all precisions and can use SCOTCH as well as ParMETIS. -""" - -usage = """There are four MUMPS libraries for the four different precisions: - -libsmumps.a for single precision real -libdmumps.a for double precision real -libcmumps.a for single precision complex -libzmumps.a for double precision complex. -""" - -examples = """Examples can be found in $EBROOTMUMPS/examples.""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://mumps.enseeiht.fr/'] -sources = ['%(name)s_%(version)s.tar.gz'] - -patches = [ - 'MUMPS-%(version)s_examples_mkl.patch', - 'MUMPS-%(version)s_shared-pord.patch', # builds the shared libs of PORD -] - -dependencies = [ - ('SCOTCH', '6.1.0'), - ('ParMETIS', '4.0.3'), -] - -buildopts = 'all' - -parallel = 1 - -modextravars = { - 'MUMPS_ROOT': '%(installdir)s', - 'MUMPSROOT': '%(installdir)s', - 'MUMPS_INCLUDE': '%(installdir)s/include', - 'MUMPS_LIB': '%(installdir)s/lib' -} - -postinstallcmds = [ - "cp -r %(builddir)s/MUMPS_%(version)s/examples %(installdir)s/examples", - "rm %(installdir)s/examples/*.o", - "mv %(installdir)s/examples/Makefile_installed_gnu %(installdir)s/examples/Makefile", - "rm %(installdir)s/examples/Makefile_installed*.orig", - "rm %(installdir)s/examples/Makefile_installed", - "rm %(installdir)s/examples/?simpletest", - "rm %(installdir)s/examples/?simpletest_save_restore", - "rm %(installdir)s/examples/c_example", - "rm %(installdir)s/examples/c_example_save_restore", - "chmod 644 %(installdir)s/examples/*", -] - -moduleclass = 'math' diff --git a/Golden_Repo/m/MUMPS/MUMPS-5.3.4-intel-2020.eb b/Golden_Repo/m/MUMPS/MUMPS-5.3.4-intel-2020.eb deleted file mode 100644 index fee34f72eecfe01596eb3672a6a29afaea7de7d0..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUMPS/MUMPS-5.3.4-intel-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -name = 'MUMPS' -version = '5.3.4' - -homepage = 'http://graal.ens-lyon.fr/MUMPS/' -description = """MUMPS, a parallel sparse direct solver, has been installed as a module in $EBROOTMUMPS -It contains all precisions and can use SCOTCH as well as ParMETIS. -""" - -usage = """There are four MUMPS libraries for the four different precisions: - -libsmumps.a for single precision real -libdmumps.a for double precision real -libcmumps.a for single precision complex -libzmumps.a for double precision complex. -""" - -examples = """Examples can be found in $EBROOTMUMPS/examples.""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://mumps.enseeiht.fr/'] -sources = ['%(name)s_%(version)s.tar.gz'] - -patches = [ - 'MUMPS-%(version)s_examples_mkl.patch', - 'MUMPS-%(version)s_shared-pord.patch', # builds the shared libs of PORD -] - -dependencies = [ - ('SCOTCH', '6.1.0'), - ('ParMETIS', '4.0.3'), -] - -buildopts = 'all' - -parallel = 1 - -modextravars = { - 'MUMPS_ROOT': '%(installdir)s', - 'MUMPSROOT': '%(installdir)s', - 'MUMPS_INCLUDE': '%(installdir)s/include', - 'MUMPS_LIB': '%(installdir)s/lib' -} - -postinstallcmds = [ - "cp -r %(builddir)s/MUMPS_%(version)s/examples %(installdir)s/examples", - "rm %(installdir)s/examples/*.o", - "mv %(installdir)s/examples/Makefile_installed %(installdir)s/examples/Makefile", - "rm %(installdir)s/examples/Makefile_installed*.orig", - "rm %(installdir)s/examples/Makefile_installed_gnu", - "rm %(installdir)s/examples/?simpletest", - "rm %(installdir)s/examples/?simpletest_save_restore", - "rm %(installdir)s/examples/c_example", - "rm %(installdir)s/examples/c_example_save_restore", - "chmod 644 %(installdir)s/examples/*", -] - -moduleclass = 'math' diff --git a/Golden_Repo/m/MUMPS/MUMPS-5.3.4-intel-para-2020.eb b/Golden_Repo/m/MUMPS/MUMPS-5.3.4-intel-para-2020.eb deleted file mode 100644 index c77aef1b81697db4f32b64f6691e89a8e1c4f0ee..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUMPS/MUMPS-5.3.4-intel-para-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -name = 'MUMPS' -version = '5.3.4' - -homepage = 'http://graal.ens-lyon.fr/MUMPS/' -description = """MUMPS, a parallel sparse direct solver, has been installed as a module in $EBROOTMUMPS -It contains all precisions and can use SCOTCH as well as ParMETIS. -""" - -usage = """There are four MUMPS libraries for the four different precisions: - -libsmumps.a for single precision real -libdmumps.a for double precision real -libcmumps.a for single precision complex -libzmumps.a for double precision complex. -""" - -examples = """Examples can be found in $EBROOTMUMPS/examples.""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://mumps.enseeiht.fr/'] -sources = ['%(name)s_%(version)s.tar.gz'] - -patches = [ - 'MUMPS-%(version)s_examples_mkl.patch', - 'MUMPS-%(version)s_shared-pord.patch', # builds the shared libs of PORD -] - -dependencies = [ - ('SCOTCH', '6.1.0'), - ('ParMETIS', '4.0.3'), -] - -buildopts = 'all' - -parallel = 1 - -modextravars = { - 'MUMPS_ROOT': '%(installdir)s', - 'MUMPSROOT': '%(installdir)s', - 'MUMPS_INCLUDE': '%(installdir)s/include', - 'MUMPS_LIB': '%(installdir)s/lib' -} - -postinstallcmds = [ - "cp -r %(builddir)s/MUMPS_%(version)s/examples %(installdir)s/examples", - "rm %(installdir)s/examples/*.o", - "mv %(installdir)s/examples/Makefile_installed %(installdir)s/examples/Makefile", - "rm %(installdir)s/examples/Makefile_installed*.orig", - "rm %(installdir)s/examples/Makefile_installed_gnu", - "rm %(installdir)s/examples/?simpletest", - "rm %(installdir)s/examples/?simpletest_save_restore", - "rm %(installdir)s/examples/c_example", - "rm %(installdir)s/examples/c_example_save_restore", - "chmod 644 %(installdir)s/examples/*", -] - -moduleclass = 'math' diff --git a/Golden_Repo/m/MUMPS/MUMPS-5.3.4_examples_mkl.patch b/Golden_Repo/m/MUMPS/MUMPS-5.3.4_examples_mkl.patch deleted file mode 100644 index ad19bf66bbdf1bf658a64fd4954dae63b8789bd5..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUMPS/MUMPS-5.3.4_examples_mkl.patch +++ /dev/null @@ -1,252 +0,0 @@ ---- MUMPS_5.3.4/VERSION 2020-09-28 09:16:41.000000000 +0200 -+++ MUMPS_5.3.4_ok/VERSION 2020-10-20 10:26:15.553979000 +0200 -@@ -1,2 +1,3 @@ - MUMPS 5.3.4 - Mon Sep 28 07:16:41 UTC 2020 -+ ---- MUMPS_5.3.4/examples/Makefile_installed 1970-01-01 01:00:00.000000000 +0100 -+++ MUMPS_5.3.4_ok/examples/Makefile_installed 2020-10-20 14:54:57.933120000 +0200 -@@ -0,0 +1,120 @@ -+# -+# This file is part of MUMPS 5.3.4, released -+# on Mon Sep 28 07:16:41 UTC 2020 -+# -+CC = mpicc -+FC = mpif77 -+FL = mpif77 -+ -+LIBBLAS = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -+SCALAP=-lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LIBPAR = $(SCALAP) $(LIBBLAS) -+ -+#Preprocessor defs for calling Fortran from C (-DAdd_ or -DAdd__ or -DUPPER) -+CDEFS = -DAdd_ -+ -+#Begin Optimized options -+OPTF = -O -nofor_main -qopenmp -Dintel_ -DALLOW_NON_INIT -+OPTL = -O -nofor_main -qopenmp -+OPTC = -O -qopenmp -+#End Optimized options -+ -+ -+default: d -+ -+.PHONY: default all s d c z multi clean -+.SECONDEXPANSION: -+ -+all: c z s d multi -+ -+c: csimpletest csimpletest_save_restore -+z: zsimpletest zsimpletest_save_restore -+s: ssimpletest ssimpletest_save_restore -+d: dsimpletest dsimpletest_save_restore c_example_save_restore c_example -+multi: multiple_arithmetics_example -+ -+ -+SCOTCHDIR=$(EBROOTSCOTCH) -+LMETISDIR=$(EBROOTPARMETIS) -+LMETIS=-L$(EBROOTPARMETIS)/lib -lparmetis -lmetis -+LSCOTCH=-L$(EBROOTSCOTCH)/lib -lptesmumps -lptscotch -lptscotcherr -lesmumps -lscotch -lscotcherr -+LPORD=-L$(MUMPS_LIB) -lpord -+ -+LIBMUMPS_COMMON = -L$(MUMPS_LIB)/ -lmumps_common -+ -+LORDERINGS=$(LMETIS) $(LPORD) $(LSCOTCH) -+ -+LIBSMUMPS = -L$(MUMPS_LIB) -lsmumps $(LIBMUMPS_COMMON) -+ -+ssimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) ssimpletest.o $(LIBSMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBDMUMPS = -L$(MUMPS_LIB) -ldmumps $(LIBMUMPS_COMMON) -+ -+dsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) dsimpletest.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBCMUMPS = 
-L$(MUMPS_LIB) -lcmumps$(PLAT)$(LIBEXT) $(LIBMUMPS_COMMON) -+ -+csimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) csimpletest.o $(LIBCMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBZMUMPS = -L$(MUMPS_LIB) -lzmumps$(PLAT)$(LIBEXT) $(LIBMUMPS_COMMON) -+ -+zsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) zsimpletest.o $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+c_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+multiple_arithmetics_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBSMUMPS) $(LIBDMUMPS) $(LIBCMUMPS) $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ssimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) ssimpletest_save_restore.o $(LIBSMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+dsimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) dsimpletest_save_restore.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+csimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) csimpletest_save_restore.o $(LIBCMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+zsimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) zsimpletest_save_restore.o $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+c_example_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+.SUFFIXES: .c .F .o -+.F.o: -+ $(FC) $(OPTF) -I. -I$(MUMPS_INCLUDE) -c $*.F -+.c.o: -+ $(CC) $(OPTC) $(CDEFS) -I. -I$(MUMPS_INCLUDE) -c $*.c -+ -+ -+$(MUMPS_LIB)/libsmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(MUMPS_LIB)/libdmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(MUMPS_LIB)/libcmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(MUMPS_LIB)/libzmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(LIBMUMPS_COMMON): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+clean: -+ $(RM) *.o [sdcz]simpletest c_example multiple_arithmetics_example ssimpletest_save_restore dsimpletest_save_restore csimpletest_save_restore zsimpletest_save_restore c_example_save_restore ---- MUMPS_5.3.4/examples/Makefile_installed_gnu 1970-01-01 01:00:00.000000000 +0100 -+++ MUMPS_5.3.4_ok/examples/Makefile_installed_gnu 2020-10-20 14:55:13.846779000 +0200 -@@ -0,0 +1,120 @@ -+# -+# This file is part of MUMPS 5.3.4, released -+# on Mon Sep 28 07:16:41 UTC 2020 -+# -+CC = mpicc -+FC = mpif77 -+FL = mpif77 -+ -+LIBBLAS = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lm -ldl -+SCALAP=-lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+LIBPAR = $(SCALAP) $(LIBBLAS) -+ -+#Preprocessor defs for calling Fortran from C (-DAdd_ or -DAdd__ or -DUPPER) -+CDEFS = -DAdd_ -+ -+#Begin Optimized options -+OPTF = -O -nofor_main -qopenmp -Dintel_ -DALLOW_NON_INIT -+OPTL = -O -nofor_main -qopenmp -+OPTC = -O -qopenmp -+#End Optimized options -+ -+ -+default: d -+ -+.PHONY: default all s d c z multi clean -+.SECONDEXPANSION: -+ -+all: c z s d multi -+ -+c: csimpletest csimpletest_save_restore -+z: zsimpletest zsimpletest_save_restore -+s: ssimpletest ssimpletest_save_restore -+d: dsimpletest dsimpletest_save_restore c_example_save_restore c_example -+multi: multiple_arithmetics_example -+ -+ -+SCOTCHDIR=$(EBROOTSCOTCH) -+LMETISDIR=$(EBROOTPARMETIS) -+LMETIS=-L$(EBROOTPARMETIS)/lib -lparmetis -lmetis -+LSCOTCH=-L$(EBROOTSCOTCH)/lib -lptesmumps -lptscotch -lptscotcherr -lesmumps -lscotch -lscotcherr -+LPORD=-L$(MUMPS_LIB) -lpord -+ -+LIBMUMPS_COMMON = -L$(MUMPS_LIB)/ -lmumps_common -+ -+LORDERINGS=$(LMETIS) $(LPORD) $(LSCOTCH) -+ -+LIBSMUMPS = -L$(MUMPS_LIB) 
-lsmumps $(LIBMUMPS_COMMON) -+ -+ssimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) ssimpletest.o $(LIBSMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBDMUMPS = -L$(MUMPS_LIB) -ldmumps $(LIBMUMPS_COMMON) -+ -+dsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) dsimpletest.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBCMUMPS = -L$(MUMPS_LIB) -lcmumps$(PLAT)$(LIBEXT) $(LIBMUMPS_COMMON) -+ -+csimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) csimpletest.o $(LIBCMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+LIBZMUMPS = -L$(MUMPS_LIB) -lzmumps$(PLAT)$(LIBEXT) $(LIBMUMPS_COMMON) -+ -+zsimpletest: $$@.o -+ $(FL) -o $@ $(OPTL) zsimpletest.o $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+c_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ -+multiple_arithmetics_example: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBSMUMPS) $(LIBDMUMPS) $(LIBCMUMPS) $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+ssimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) ssimpletest_save_restore.o $(LIBSMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+dsimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) dsimpletest_save_restore.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+csimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) csimpletest_save_restore.o $(LIBCMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+zsimpletest_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) zsimpletest_save_restore.o $(LIBZMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+c_example_save_restore: $$@.o -+ $(FL) -o $@ $(OPTL) $@.o $(LIBDMUMPS) $(LORDERINGS) $(LIBPAR) -+ -+.SUFFIXES: .c .F .o -+.F.o: -+ $(FC) $(OPTF) -I. -I$(MUMPS_INCLUDE) -c $*.F -+.c.o: -+ $(CC) $(OPTC) $(CDEFS) -I. -I$(MUMPS_INCLUDE) -c $*.c -+ -+ -+$(MUMPS_LIB)/libsmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(MUMPS_LIB)/libdmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(MUMPS_LIB)/libcmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(MUMPS_LIB)/libzmumps$(PLAT)$(LIBEXT): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+$(LIBMUMPS_COMMON): -+ @echo 'Error: you should build the library' $@ 'first' -+ exit 1 -+ -+clean: -+ $(RM) *.o [sdcz]simpletest c_example multiple_arithmetics_example ssimpletest_save_restore dsimpletest_save_restore csimpletest_save_restore zsimpletest_save_restore c_example_save_restore diff --git a/Golden_Repo/m/MUMPS/MUMPS-5.3.4_shared-pord.patch b/Golden_Repo/m/MUMPS/MUMPS-5.3.4_shared-pord.patch deleted file mode 100644 index 10edc5b7eb89aeb64f824c3add960bf8f7ceaecb..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUMPS/MUMPS-5.3.4_shared-pord.patch +++ /dev/null @@ -1,80 +0,0 @@ -source: https://src.fedoraproject.org/rpms/MUMPS/blob/master/f/MUMPS-shared-pord.patch -author: Antonio Trande (sagitter@fedoraproject.org) -Create static and shared versions of the PORD library. 
- -Index: mumps/PORD/lib/Makefile -=================================================================== ---- mumps.orig/PORD/lib/Makefile -+++ mumps/PORD/lib/Makefile -@@ -9,7 +9,7 @@ - - INCLUDES = -I../include - --COPTIONS = $(INCLUDES) $(CFLAGS) $(OPTFLAGS) -+COPTIONS = $(INCLUDES) $(CFLAGS) $(OPTFLAGS) -fPIC - - OBJS = graph.o gbipart.o gbisect.o ddcreate.o ddbisect.o nestdiss.o \ - multisector.o gelim.o bucket.o tree.o \ -@@ -24,12 +24,16 @@ - .c.o: - $(CC) $(COPTIONS) -c $*.c $(OUTC)$*.o - --libpord$(LIBEXT):$(OBJS) -- $(AR)$@ $(OBJS) -+libpord$(PLAT).a:$(OBJS) -+ $(AR) $@ $(OBJS) - $(RANLIB) $@ - -+libpord$(PLAT).so: $(OBJS) -+ $(CC) -shared $(OBJS) -Wl,-soname,libpord$(PLAT)-$(SONAME_VERSION).so -o libpord$(PLAT)-$(SONAME_VERSION).so $(OPTL) -Wl,-z,defs -+ ln -fs libpord$(PLAT)-$(SONAME_VERSION).so $@ -+ - clean: - rm -f *.o - - realclean: -- rm -f *.o libpord.a -+ rm -f *.o libpord*.a *.so -Index: mumps/Makefile -=================================================================== ---- mumps.orig/Makefile -+++ mumps/Makefile -@@ -54,7 +54,7 @@ - multi_example: s d c z - (cd examples ; $(MAKE) multi) - --requiredobj: Makefile.inc $(LIBSEQNEEDED) $(libdir)/libpord$(PLAT)$(LIBEXT) -+requiredobj: Makefile.inc $(LIBSEQNEEDED) $(libdir)/libpord$(PLAT).a $(libdir)/libpord$(PLAT).so - - # dummy MPI library (sequential version) - -@@ -62,19 +62,26 @@ - (cd libseq; $(MAKE)) - - # Build the libpord.a library and copy it into $(topdir)/lib --$(libdir)/libpord$(PLAT)$(LIBEXT): -+$(libdir)/libpord$(PLAT).a: - if [ "$(LPORDDIR)" != "" ] ; then \ - cd $(LPORDDIR); \ - $(MAKE) CC="$(CC)" CFLAGS="$(OPTC)" AR="$(AR)" RANLIB="$(RANLIB)" OUTC="$(OUTC)" LIBEXT=$(LIBEXT); \ - fi; - if [ "$(LPORDDIR)" != "" ] ; then \ -- cp $(LPORDDIR)/libpord$(LIBEXT) $@; \ -+ cp $(LPORDDIR)/libpord$(PLAT).a $@; \ - fi; - -+$(libdir)/libpord$(PLAT).so: -+ if [ "$(LPORDDIR)" != "" ] ; then \ -+ cd $(LPORDDIR); make CC="$(CC)" CFLAGS="$(OPTC)" AR="$(AR)" ARFUNCT= RANLIB="$(RANLIB)" libpord$(PLAT).so; fi; -+ if [ "$(LPORDDIR)" != "" ] ; then \ -+ cp -a $(LPORDDIR)/libpord*.so lib/; fi; -+ -+ - clean: - (cd src; $(MAKE) clean) - (cd examples; $(MAKE) clean) -- (cd $(libdir); $(RM) *$(PLAT)$(LIBEXT)) -+ (cd $(libdir); $(RM) *$(PLAT).a *$(PLAT).so) - (cd libseq; $(MAKE) clean) - if [ "$(LPORDDIR)" != "" ] ; then \ - cd $(LPORDDIR); $(MAKE) realclean; \ diff --git a/Golden_Repo/m/MUST/MUST-1.6-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.6-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index 869dc42fb50939773b1afc54cdca8e2c92aa9d81..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.6-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.6" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. 
As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. -""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] - -patches = [ - 'flags_cmake.patch', - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/MUST-1.6-iimpi-2020-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.6-iimpi-2020-Python-3.8.5.eb deleted file mode 100644 index a5380b46d70a7f93f688c387d00de556a37bf592..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.6-iimpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.6" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] - -patches = [ - 'flags_cmake.patch', - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/MUST-1.6-ipsmpi-2020-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.6-ipsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index 0eded138e361b63687dad17bda2a3a677b10b805..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.6-ipsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,56 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.6" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] - -patches = [ - 'flags_cmake.patch', - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/MUST-1.7-gompi-2021-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.7-gompi-2021-Python-3.8.5.eb deleted file mode 100644 index b58ad29b232d2b4b469e587c64549b26d7ecaa4d..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.7-gompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,60 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.7" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2021'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] -checksums = [( - 'sha512', - 'e7e27ccf9b830bb22afc3792eb8dcdd491aba67a64f41a1a68856ad58f661216efd2b298819136249ab5cbb934b062073c957fa29a66ddd82d' - 'fb7f094d61f037' -)] - -patches = [ - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -# install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/MUST-1.7-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.7-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 753c51e31cfdd48fd109e5242a99eb786cfc5d83..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.7-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,60 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.7" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] -checksums = [( - 'sha512', - 'e7e27ccf9b830bb22afc3792eb8dcdd491aba67a64f41a1a68856ad58f661216efd2b298819136249ab5cbb934b062073c957fa29a66ddd82d' - 'fb7f094d61f037' -)] - -patches = [ - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -# install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/MUST-1.7-iimpi-2021-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.7-iimpi-2021-Python-3.8.5.eb deleted file mode 100644 index 66fda74fcdbae70e2d8895e1b42a8ac656d26930..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.7-iimpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,60 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.7" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2021'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] -checksums = [( - 'sha512', - 'e7e27ccf9b830bb22afc3792eb8dcdd491aba67a64f41a1a68856ad58f661216efd2b298819136249ab5cbb934b062073c957fa29a66ddd82d' - 'fb7f094d61f037' -)] - -patches = [ - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -# install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/MUST-1.7-iompi-2021-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.7-iompi-2021-Python-3.8.5.eb deleted file mode 100644 index 0236c1871aa5a4e3712bf60e90efab8183f20eed..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.7-iompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,60 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.7" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2021'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] -checksums = [( - 'sha512', - 'e7e27ccf9b830bb22afc3792eb8dcdd491aba67a64f41a1a68856ad58f661216efd2b298819136249ab5cbb934b062073c957fa29a66ddd82d' - 'fb7f094d61f037' -)] - -patches = [ - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -# install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/MUST-1.7-ipsmpi-2021-Python-3.8.5.eb b/Golden_Repo/m/MUST/MUST-1.7-ipsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 568471104a877b23e7b4295614320bbb5b435839..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/MUST-1.7-ipsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,60 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# Authors:: Benedikt Steinbusch <b.steinbusch@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'CMakeMake' - -name = "MUST" -version = "1.7" -versionsuffix = "-Python-%(pyver)s" - -homepage = 'https://hpc.rwth-aachen.de/must/' -description = """MUST detects usage errors of the Message Passing Interface (MPI) and reports them -to the user. As MPI calls are complex and usage errors common, this functionality is extremely helpful -for application developers that want to develop correct MPI applications. This includes errors that -already manifest --segmentation faults or incorrect results -- as well as many errors that are not -visible to the application developer or do not manifest on a certain system or MPI implementation. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} - -source_urls = ['https://hpc.rwth-aachen.de/must/files/'] -sources = ['%(name)s-v%(version)s.tar.gz'] -checksums = [( - 'sha512', - 'e7e27ccf9b830bb22afc3792eb8dcdd491aba67a64f41a1a68856ad58f661216efd2b298819136249ab5cbb934b062073c957fa29a66ddd82d' - 'fb7f094d61f037' -)] - -patches = [ - 'wrap-config.cmake.in.patch', -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Graphviz', '2.44.1'), - ('libxml2', '2.9.10'), - ('Python', '3.8.5'), -] - -configopts = ' -DCMAKE_BUILD_TYPE=Release -DPython_ROOT_DIR="$EBROOTPYTHON" ' - -# install_cmd = 'make install install-prebuilds' - -sanity_check_paths = { - 'files': ["bin/mustrun", "include/mustConfig.h"], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/MUST/flags_cmake.patch b/Golden_Repo/m/MUST/flags_cmake.patch deleted file mode 100644 index 44302a560a5c83c44f781e699f4207584aca5779..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/flags_cmake.patch +++ /dev/null @@ -1,17 +0,0 @@ -diff -ruN MUST-v1.6-rc3.orig/externals/GTI/cmakemodules/InstallationConfiguration.cmake.in MUST-v1.6-rc3/externals/GTI/cmakemodules/InstallationConfiguration.cmake.in ---- MUST-v1.6-rc3.orig/externals/GTI/cmakemodules/InstallationConfiguration.cmake.in 2019-01-17 13:21:29.000000000 +0100 -+++ MUST-v1.6-rc3/externals/GTI/cmakemodules/InstallationConfiguration.cmake.in 2019-02-26 17:27:15.332674000 +0100 -@@ -42,10 +42,9 @@ - SET (MPI_Fortran_LINK_FLAGS "@MPI_Fortran_LINK_FLAGS@" CACHE PATH "" FORCE) - SET (MPI_Fortran_LIBRARIES @MPI_Fortran_LIBRARIES@ CACHE PATH "" FORCE) - --SET (CMAKE_CXX_FLAGS @CMAKE_CXX_FLAGS@ CACHE STRING "" FORCE) --SET (CMAKE_C_FLAGS @CMAKE_C_FLAGS@ CACHE STRING "" FORCE) --SET (CMAKE_Fortran_FLAGS @CMAKE_Fortran_FLAGS@ CACHE STRING "" FORCE) --STRING(REPLACE ";" " " CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") -+SET (CMAKE_CXX_FLAGS "@CMAKE_CXX_FLAGS@" CACHE STRING "" FORCE) -+SET (CMAKE_C_FLAGS "@CMAKE_C_FLAGS@" CACHE STRING "" FORCE) -+SET (CMAKE_Fortran_FLAGS "@CMAKE_Fortran_FLAGS@" CACHE STRING "" FORCE) - - SET (MPIEXEC "@MPIEXEC@" CACHE PATH "" FORCE) - SET (MPIEXEC_NUMPROC_FLAG "@MPIEXEC_NUMPROC_FLAG@" CACHE PATH "" FORCE) diff --git a/Golden_Repo/m/MUST/wrap-config.cmake.in.patch b/Golden_Repo/m/MUST/wrap-config.cmake.in.patch deleted file mode 100644 index 5240d39fd4df30ca52548defd727f9a7bece8c47..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MUST/wrap-config.cmake.in.patch +++ /dev/null @@ -1,90 +0,0 @@ ---- externals/wrap/wrap-config.cmake.in.orig 2020-08-26 21:13:57.739359000 +0200 -+++ externals/wrap/wrap-config.cmake.in 2020-08-26 21:05:11.535717000 +0200 -@@ -39,9 +39,9 @@ - - # Play nice with FindPythonInterp -- use the interpreter if it was found, - # otherwise use the script directly. -- find_package(PythonInterp 2.6) -- if (PYTHON_EXECUTABLE) -- set(command ${PYTHON_EXECUTABLE}) -+ find_package(Python) -+ if (Python_EXECUTABLE) -+ set(command ${Python_EXECUTABLE}) - set(script_arg ${Wrap_EXECUTABLE}) - else() - set(command ${Wrap_EXECUTABLE}) ---- externals/GTI/externals/wrap/wrap-config.cmake.in.orig 2020-08-26 21:12:21.579754000 +0200 -+++ externals/GTI/externals/wrap/wrap-config.cmake.in 2020-08-26 21:14:34.723726000 +0200 -@@ -39,9 +39,9 @@ - - # Play nice with FindPythonInterp -- use the interpreter if it was found, - # otherwise use the script directly. 
-- find_package(PythonInterp 2.6) -- if (PYTHON_EXECUTABLE) -- set(command ${PYTHON_EXECUTABLE}) -+ find_package(Python) -+ if (Python_EXECUTABLE) -+ set(command ${Python_EXECUTABLE}) - set(script_arg ${Wrap_EXECUTABLE}) - else() - set(command ${Wrap_EXECUTABLE}) ---- externals/GTI/externals/PnMPI/externals/wrap/wrap-config.cmake.in.orig 2020-08-26 21:13:22.408675000 +0200 -+++ externals/GTI/externals/PnMPI/externals/wrap/wrap-config.cmake.in 2020-08-26 21:00:55.200196000 +0200 -@@ -39,9 +39,9 @@ - - # Play nice with FindPythonInterp -- use the interpreter if it was found, - # otherwise use the script directly. -- find_package(PythonInterp 2.6) -- if (PYTHON_EXECUTABLE) -- set(command ${PYTHON_EXECUTABLE}) -+ find_package(Python) -+ if (Python_EXECUTABLE) -+ set(command ${Python_EXECUTABLE}) - set(script_arg ${Wrap_EXECUTABLE}) - else() - set(command ${Wrap_EXECUTABLE}) ---- externals/wrap/WrapConfig.cmake.orig 2021-05-27 10:35:39.000000000 +0200 -+++ externals/wrap/WrapConfig.cmake 2021-05-27 10:36:23.000000000 +0200 -@@ -29,9 +29,9 @@ - - # Play nice with FindPythonInterp -- use the interpreter if it was found, - # otherwise use the script directly. -- find_package(PythonInterp 2.6) -- if (PYTHON_EXECUTABLE) -- set(command ${PYTHON_EXECUTABLE}) -+ find_package(Python) -+ if (Python_EXECUTABLE) -+ set(command ${Python_EXECUTABLE}) - set(script_arg ${Wrap_EXECUTABLE}) - else() - set(command ${Wrap_EXECUTABLE}) ---- externals/GTI/externals/wrap/WrapConfig.cmake.orig 2021-05-27 10:35:39.000000000 +0200 -+++ externals/GTI/externals/wrap/WrapConfig.cmake 2021-05-27 10:36:23.000000000 +0200 -@@ -29,9 +29,9 @@ - - # Play nice with FindPythonInterp -- use the interpreter if it was found, - # otherwise use the script directly. -- find_package(PythonInterp 2.6) -- if (PYTHON_EXECUTABLE) -- set(command ${PYTHON_EXECUTABLE}) -+ find_package(Python) -+ if (Python_EXECUTABLE) -+ set(command ${Python_EXECUTABLE}) - set(script_arg ${Wrap_EXECUTABLE}) - else() - set(command ${Wrap_EXECUTABLE}) ---- externals/GTI/externals/PnMPI/externals/wrap/WrapConfig.cmake.orig 2021-05-27 10:35:39.000000000 +0200 -+++ externals/GTI/externals/PnMPI/externals/wrap/WrapConfig.cmake 2021-05-27 10:36:23.000000000 +0200 -@@ -29,9 +29,9 @@ - - # Play nice with FindPythonInterp -- use the interpreter if it was found, - # otherwise use the script directly. -- find_package(PythonInterp 2.6) -- if (PYTHON_EXECUTABLE) -- set(command ${PYTHON_EXECUTABLE}) -+ find_package(Python) -+ if (Python_EXECUTABLE) -+ set(command ${Python_EXECUTABLE}) - set(script_arg ${Wrap_EXECUTABLE}) - else() - set(command ${Wrap_EXECUTABLE}) diff --git a/Golden_Repo/m/Mercurial/Mercurial-5.5.2-GCCcore-10.3.0-Python-3.8.15.eb b/Golden_Repo/m/Mercurial/Mercurial-5.5.2-GCCcore-10.3.0-Python-3.8.15.eb deleted file mode 100644 index d803adfee1f75d2592529f79bc952df36ed4acf5..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/Mercurial/Mercurial-5.5.2-GCCcore-10.3.0-Python-3.8.15.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = "PythonPackage" - -name = 'Mercurial' -version = '5.5.2' -versionsuffix = "-Python-%(pyver)s" - -homepage = 'http://mercurial.selenic.com/' -description = """Mercurial is a free, distributed source control management tool. It efficiently handles projects -of any size and offers an easy and intuitive interface. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -source_urls = ['http://mercurial-scm.org/release/'] -sources = [SOURCELOWER_TAR_GZ] - -download_dep_fail = True - -builddependencies = [('binutils', '2.36.1')] - -dependencies = [ - ('Python', '3.8.5') -] - -sanity_check_paths = { - 'files': ['bin/hg'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mercurial'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/m/Mercurial/Mercurial-5.5.2-GCCcore-9.3.0-Python-3.8.15.eb b/Golden_Repo/m/Mercurial/Mercurial-5.5.2-GCCcore-9.3.0-Python-3.8.15.eb deleted file mode 100644 index 09b0a9ec24f913b12a6728555be1cb87405f1a03..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/Mercurial/Mercurial-5.5.2-GCCcore-9.3.0-Python-3.8.15.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = "PythonPackage" - -name = 'Mercurial' -version = '5.5.2' -versionsuffix = "-Python-%(pyver)s" - -homepage = 'http://mercurial.selenic.com/' -description = """Mercurial is a free, distributed source control management tool. It efficiently handles projects -of any size and offers an easy and intuitive interface. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -source_urls = ['http://mercurial-scm.org/release/'] -sources = [SOURCELOWER_TAR_GZ] - -download_dep_fail = True - -builddependencies = [('binutils', '2.34')] - -dependencies = [ - ('Python', '3.8.5') -] - -sanity_check_paths = { - 'files': ['bin/hg'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mercurial'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/m/Meson/Meson-0.55.0-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/m/Meson/Meson-0.55.0-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 0c4b37e2518e9fe23bc9cfcba2e7f72f99413a8c..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/Meson/Meson-0.55.0-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'Meson' -version = '0.55.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://mesonbuild.com' -description = "Meson is a cross-platform build system designed to be both as fast and as user friendly as possible." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Ninja', '1.10.0'), -] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -options = {'modulename': 'mesonbuild'} - -sanity_check_paths = { - 'files': ['bin/meson'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/m/Meson/Meson-0.55.0-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/m/Meson/Meson-0.55.0-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 6a5d6f2252c8ecef8c3a91bca45114e9915f4edc..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/Meson/Meson-0.55.0-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'Meson' -version = '0.55.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://mesonbuild.com' -description = "Meson is a cross-platform build system designed to be both as fast and as user friendly as possible." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_LOWER_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Ninja', '1.10.0'), -] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -options = {'modulename': 'mesonbuild'} - -sanity_check_paths = { - 'files': ['bin/meson'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/m/MethPipe/MethPipe-4.1.1-gpsmkl-2020.eb b/Golden_Repo/m/MethPipe/MethPipe-4.1.1-gpsmkl-2020.eb deleted file mode 100644 index 61936a24137caae25c16a89d7ed1480345474990..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/MethPipe/MethPipe-4.1.1-gpsmkl-2020.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'MethPipe' -version = '4.1.1' - -homepage = 'http://smithlab.usc.edu/methpipe/' -description = """The MethPipe software package is a computational pipeline for - analyzing bisulfite sequencing data (BS-seq, WGBS and RRBS). -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'cstd': 'c++11'} - -source_urls = ['https://github.com/smithlabcode/methpipe/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [ - ('GSL', '2.6'), - ('zlib', '1.2.11'), -] - -local_libs = '"-L$EBROOTGSL/lib -lgsl -lgslcblas -L$EBROOTZLIB/lib -lz"' -buildopts = 'all LIBS=%s CC=$CC CXX=$CXX CFLAGS="$CFLAGS" ' % local_libs -buildopts += 'CXXFLAGS="$CXXFLAGS" LDFLAGS="$LDFLAGS" ' - -installopts = 'LIBS=%s' % local_libs - -# removed "hmr_plant" "rmapbs-pe" "rmapbs" from sanity check -- -# do not seem to be a make target in vers >=3.3.1 - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["allelicmeth", "amrfinder", "amrtester", - "bsrate", "dmr", "duplicate-remover", - "hmr", "lc_approx", "levels", - "merge-bsrate", "merge-methcounts", - "methcounts", "methdiff", "methstates", - "pmd", "roimethstat"]], - 'dirs': ['bin', 'include', 'lib'], -} - -moduleclass = 'bio' diff --git a/Golden_Repo/m/magma/magma-2.5.4-example.patch b/Golden_Repo/m/magma/magma-2.5.4-example.patch deleted file mode 100644 index b5217aea87ec018912ae55609814d5bf1c8d2d60..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/magma/magma-2.5.4-example.patch +++ /dev/null @@ -1,76 +0,0 @@ ---- magma-2.5.4/make.check-openblas 2020-10-09 05:05:07.000000000 +0200 -+++ magma-2.5.4_ok/make.check-openblas 2020-11-17 16:36:48.088614000 +0100 -@@ -9,3 +9,4 @@ - ifeq ($(wildcard $(OPENBLASDIR)),) - $(error $$OPENBLASDIR=$(OPENBLASDIR) does not exist. Please set $$OPENBLASDIR to where OPENBLAS is installed.) - endif -+ ---- magma-2.5.4/example/Makefile 2020-10-09 05:05:05.000000000 +0200 -+++ magma-2.5.4_ok/example/Makefile 2020-11-19 11:06:33.627800000 +0100 -@@ -6,9 +6,9 @@ - # Paths where MAGMA, CUDA, and OpenBLAS are installed. - # MAGMADIR can be .. to test without installing. - #MAGMADIR ?= .. 
--MAGMADIR ?= /usr/local/magma --CUDADIR ?= /usr/local/cuda --OPENBLASDIR ?= /usr/local/openblas -+MAGMADIR = $(EBROOTMAGMA) -+CUDADIR = $(EBROOTCUDA) -+#OPENBLASDIR ?= /usr/local/openblas - - CC = gcc - FORT = gfortran -@@ -20,28 +20,28 @@ - - # ---------------------------------------- - # Flags and paths to MAGMA, CUDA, and LAPACK/BLAS --# MAGMA_CFLAGS := -DADD_ \ --# -I$(MAGMADIR)/include \ --# -I$(MAGMADIR)/sparse/include \ --# -I$(CUDADIR)/include --# --# MAGMA_F90FLAGS := -Dmagma_devptr_t="integer(kind=8)" \ --# -I$(MAGMADIR)/include --# -+MAGMA_CFLAGS := -DADD_ \ -+ -I$(MAGMADIR)/include \ -+ -I$(MAGMADIR)/sparse/include \ -+ -I$(CUDADIR)/include -+ -+MAGMA_F90FLAGS := -Dmagma_devptr_t="integer(kind=8)" \ -+ -I$(MAGMADIR)/include -+ - # # may be lib instead of lib64 on some systems --# MAGMA_LIBS := -L$(MAGMADIR)/lib -lmagma_sparse -lmagma \ --# -L$(CUDADIR)/lib64 -lcublas -lcudart -lcusparse \ --# -L$(OPENBLASDIR)/lib -lopenblas -+MAGMA_LIBS := -L$(MAGMADIR)/lib -lmagma_sparse -lmagma \ -+ -L$(CUDADIR)/lib64 -lcublas -lcudart -lcusparse \ -+ -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lm - - - # ---------------------------------------- - # Alternatively, using pkg-config (see README.txt): --MAGMA_CFLAGS := $(shell pkg-config --cflags magma) -+#MAGMA_CFLAGS := $(shell pkg-config --cflags magma) - --MAGMA_F90FLAGS := -Dmagma_devptr_t="integer(kind=8)" \ -- $(shell pkg-config --cflags-only-I magma) -+#MAGMA_F90FLAGS := -Dmagma_devptr_t="integer(kind=8)" \ -+# $(shell pkg-config --cflags-only-I magma) - --MAGMA_LIBS := $(shell pkg-config --libs magma) -+#MAGMA_LIBS := $(shell pkg-config --libs magma) - - - # ---------------------------------------- -@@ -88,5 +88,8 @@ - %.o: %.F90 - $(FORT) $(F90FLAGS) $(MAGMA_F90FLAGS) -c -o $@ $< - --example_f: example_f.o -+fortran.o: $(CUDADIR)/src/fortran.c -+ $(CC) $(CFLAGS) $(MAGMA_CFLAGS) -DCUBLAS_GFORTRAN -c -o $@ $< -+ -+example_f: example_f.o fortran.o - $(FORT) $(LDFLAGS) -o $@ $^ $(MAGMA_LIBS) diff --git a/Golden_Repo/m/magma/magma-2.5.4-fix-makefile.patch b/Golden_Repo/m/magma/magma-2.5.4-fix-makefile.patch deleted file mode 100644 index 41b0d04fdd070e7da2786d39a25692a2c51e984e..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/magma/magma-2.5.4-fix-makefile.patch +++ /dev/null @@ -1,38 +0,0 @@ ---- magma-2.5.4/Makefile 2020-10-09 05:06:22.000000000 +0200 -+++ magma-2.5.4_ok/Makefile 2020-11-17 16:28:31.398644075 +0100 -@@ -10,7 +10,7 @@ - CC ?= cc - CXX ?= c++ - NVCC ?= nvcc --FORT ?= -+FORT ?= $(FC) - ifeq ($(FORT),) - $(warning No Fortran compiler was given in FORT in make.inc. Some testers will not be able to check their results.) 
- endif -@@ -23,17 +23,17 @@ - #FPIC = -fPIC - - # may want -std=c99 for CFLAGS, -std=c++11 for CXXFLAGS --CFLAGS ?= -O3 $(FPIC) -DADD_ -Wall -MMD --CXXFLAGS ?= $(CFLAGS) -std=c++11 --NVCCFLAGS ?= -O3 -DADD_ -Xcompiler "$(FPIC) -Wall -Wno-unused-function" -std=c++11 --FFLAGS ?= -O3 $(FPIC) -DADD_ -Wall -Wno-unused-dummy-argument --F90FLAGS ?= -O3 $(FPIC) -DADD_ -Wall -Wno-unused-dummy-argument --LDFLAGS ?= -O3 $(FPIC) -+CFLAGS := -O3 $(FPIC) -DADD_ -Wall -MMD $(CFLAGS) -+CXXFLAGS := $(CFLAGS) -std=c++11 -+NVCCFLAGS := -O3 -DADD_ -Xcompiler "$(FPIC) -Wall -Wno-unused-function" -std=c++11 $(CFLAGS) -+FFLAGS := -O3 $(FPIC) -DADD_ -Wall -Wno-unused-dummy-argument $(FCFLAGS) -+F90FLAGS := -O3 $(FPIC) -DADD_ -Wall -Wno-unused-dummy-argument $(F90FLAGS) -+LDFLAGS := -O3 $(FPIC) -fopenmp $(LDFLAGS) - --INC ?= -I$(CUDADIR)/include -+INC := -I$(CUDADIR)/include - --LIBDIR ?= -L$(CUDADIR)/lib64 --LIB ?= -lcudart -lcudadevrt -lcublas -lcusparse -llapack -lblas -lpthread -lm -+LIBDIR := -L$(CUDADIR)/lib64 -+LIB := -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lcudart -lcudadevrt -lcublas -lcusparse -lpthread -lm -ldl $(LIBS) - - GPU_TARGET ?= Kepler Maxwell Pascal - diff --git a/Golden_Repo/m/magma/magma-2.5.4-gcccoremkl-10.3.0-2021.2.0.eb b/Golden_Repo/m/magma/magma-2.5.4-gcccoremkl-10.3.0-2021.2.0.eb deleted file mode 100644 index 5d6b3ea7a1f48a39efd870b03e9d5278934b916a..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/magma/magma-2.5.4-gcccoremkl-10.3.0-2021.2.0.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = "CMakeMake" - -name = 'magma' -version = '2.5.4' - -homepage = 'https://icl.cs.utk.edu/magma/' -description = """The MAGMA project aims to develop a dense linear algebra library similar to - LAPACK but for heterogeneous/hybrid architectures, starting with current Multicore+GPU systems.""" - -site_contacts = 'Inge Gutheil <i.gutheil@fz-juelich.de>' -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True, 'openmp': True} - -source_urls = ['https://icl.cs.utk.edu/projectsfiles/magma/downloads/'] -sources = [SOURCE_TAR_GZ] -patches = [ - ('%(name)s-%(version)s-fix-makefile.patch'), - ('%(name)s-%(version)s-example.patch'), -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('CUDA', '11.3', '', SYSTEM), -] - -# make sure both static and shared libs are built -configopts = [ - '-DBUILD_SHARED_LIBS=%s -DGPU_TARGET="Volta Ampere" ' % (local_shared) for local_shared in ('ON', 'OFF') -] - -installopts = 'prefix=%(installdir)s' - -postinstallcmds = [ - "cp -r %(builddir)s/magma-%(version)s/example %(installdir)s/example", -] - -sanity_check_paths = { - 'files': ['lib/libmagma.so', 'lib/libmagma.a'], - 'dirs': ['include'], -} - -modextravars = { - 'MAGMA_ROOT': '%(installdir)s', - 'MAGMA_LIB': '%(installdir)s/lib', - 'MAGMA_INCLUDE': '%(installdir)s/include/', - 'MAGMADIR': '%(installdir)s', -} - -moduleclass = 'math' diff --git a/Golden_Repo/m/magma/magma-2.5.4-gcccoremkl-9.3.0-2020.2.254.eb b/Golden_Repo/m/magma/magma-2.5.4-gcccoremkl-9.3.0-2020.2.254.eb deleted file mode 100644 index f4def944cc3a11eb0bd00343ed7a9211df35b8b0..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/magma/magma-2.5.4-gcccoremkl-9.3.0-2020.2.254.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = "CMakeMake" - -name = 'magma' -version = '2.5.4' - -homepage = 'https://icl.cs.utk.edu/magma/' -description = """The MAGMA project aims to develop a dense linear algebra library similar to - LAPACK but for heterogeneous/hybrid architectures, 
starting with current Multicore+GPU systems.""" - -site_contacts = 'Inge Gutheil <i.gutheil@fz-juelich.de>' -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True, 'openmp': True} - -source_urls = ['https://icl.cs.utk.edu/projectsfiles/magma/downloads/'] -sources = [SOURCE_TAR_GZ] -patches = [ - ('%(name)s-%(version)s-fix-makefile.patch'), -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -# make sure both static and shared libs are built -configopts = [ - '-DBUILD_SHARED_LIBS=%s -DGPU_TARGET="Volta Ampere" ' % (local_shared) for local_shared in ('ON', 'OFF') -] - -installopts = 'prefix=%(installdir)s' - -sanity_check_paths = { - 'files': ['lib/libmagma.so', 'lib/libmagma.a'], - 'dirs': ['include'], -} - -modextravars = { - 'MAGMA_ROOT': '%(installdir)s', - 'MAGMA_LIB': '%(installdir)s/lib', - 'MAGMA_INCLUDE': '%(installdir)s/include/', - 'MAGMADIR': '%(installdir)s', -} - -moduleclass = 'math' diff --git a/Golden_Repo/m/meld/meld-3.21.0-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/m/meld/meld-3.21.0-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index b4dea9871246994c6d7d78781fe6f6d8b1d9d046..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/meld/meld-3.21.0-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'meld' -version = '3.21.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://meldmerge.org' -description = """ - Meld is a visual diff and merge tool targeted at developers. Meld helps you compare files, directories, - and version controlled projects -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -download_dep_fail = True - -builddependencies = [ - ('intltool', '0.51.0', '-Perl-5.32.0'), - ('itstool', '2.0.6'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PyGObject', '3.34.0', versionsuffix), - ('GTK+', '3.24.17'), - ('GtkSourceView', '4.4.0'), - ('gsettings-desktop-schemas', '3.34.0'), - ('GLib', '2.64.4'), -] - -modextrapaths = { - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'tools' diff --git a/Golden_Repo/m/meld/meld-3.21.0-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/m/meld/meld-3.21.0-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 3dc4dc168f506873d563ca326aca6ea70867e07f..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/meld/meld-3.21.0-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'meld' -version = '3.21.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://meldmerge.org' -description = """ - Meld is a visual diff and merge tool targeted at developers. 
Meld helps you compare files, directories, - and version controlled projects -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -download_dep_fail = True - -builddependencies = [ - ('intltool', '0.51.0', '-Perl-5.32.0'), - ('itstool', '2.0.6'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PyGObject', '3.34.0', versionsuffix), - ('GTK+', '3.24.17'), - ('GtkSourceView', '4.4.0'), - ('gsettings-desktop-schemas', '3.34.0'), - ('GLib', '2.64.4'), -] - -modextrapaths = { - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'tools' diff --git a/Golden_Repo/m/memkind/memkind-1.10.1-GCCcore-10.3.0.eb b/Golden_Repo/m/memkind/memkind-1.10.1-GCCcore-10.3.0.eb deleted file mode 100644 index 1f4648b58050a6a7f7caa04f838f7d7bc5e09a6e..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/memkind/memkind-1.10.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'memkind' -version = '1.10.1' - -homepage = 'http://memkind.github.io' -description = """User Extensible Heap Manager built on top of jemalloc which enables control of memory characteristics -and a partitioning of the heap between kinds of memory. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/memkind/memkind/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Coreutils', '8.32'), # needed to ensure that ./build_jemalloc.sh works properly - ('Autotools', '20200321'), # needed to ensure that ./build_jemalloc.sh works properly -] - -dependencies = [ - ('numactl', '2.0.13', '', SYSTEM), - ('tbb', '2020.3'), # optional, to enable the tbb heap manager -] - -# The build_jemalloc.sh is gone, autogen should take care of it. -preconfigopts = './autogen.sh && ' - -moduleclass = 'lib' diff --git a/Golden_Repo/m/memkind/memkind-1.10.1-GCCcore-9.3.0.eb b/Golden_Repo/m/memkind/memkind-1.10.1-GCCcore-9.3.0.eb deleted file mode 100644 index f4fd519e307b292b66be791125efc7c0b2d42075..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/memkind/memkind-1.10.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'memkind' -version = '1.10.1' - -homepage = 'http://memkind.github.io' -description = """User Extensible Heap Manager built on top of jemalloc which enables control of memory characteristics -and a partitioning of the heap between kinds of memory. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/memkind/memkind/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('Coreutils', '8.32'), # needed to ensure that ./build_jemalloc.sh works properly - ('Autotools', '20200321'), # needed to ensure that ./build_jemalloc.sh works properly -] - -dependencies = [ - ('numactl', '2.0.13', '', SYSTEM), - ('tbb', '2020.3'), # optional, to enable the tbb heap manager -] - -# The build_jemalloc.sh is gone, autogen should take care of it. 
-preconfigopts = './autogen.sh && ' - -moduleclass = 'lib' diff --git a/Golden_Repo/m/motif/motif-2.3.8-GCCcore-10.3.0.eb b/Golden_Repo/m/motif/motif-2.3.8-GCCcore-10.3.0.eb deleted file mode 100644 index b724d0798f243b95d2b0da22dfd0df7207514691..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/motif/motif-2.3.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'motif' -version = '2.3.8' - -homepage = 'https://motif.ics.com/' -description = """Motif refers to both a graphical user interface (GUI) specification and the widget toolkit for building - applications that follow that specification under the X Window System on Unix and other POSIX-compliant systems. - It was the standard toolkit for the Common Desktop Environment and thus for Unix. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [SOURCEFORGE_SOURCE] - -dependencies = [ - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('X11', '20200222'), -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - ('Autotools', '20200321'), - ('flex', '2.6.4'), - ('Bison', '3.7.6'), - ('util-linux', '2.36'), -] - -# makefile is not parallel safe -parallel = 1 - -sanity_check_paths = { - 'files': ['lib/libMrm.a', 'lib/libUil.a', 'lib/libXm.a', 'bin/mwm', 'bin/uil', 'bin/xmbind'], - 'dirs': ['include/Mrm', 'include/uil', 'include/X11', 'include/Xm'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/m/motif/motif-2.3.8-GCCcore-9.3.0.eb b/Golden_Repo/m/motif/motif-2.3.8-GCCcore-9.3.0.eb deleted file mode 100644 index 0eff9df6ccb1d4eeee7793ca60fa5464757ee237..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/motif/motif-2.3.8-GCCcore-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'motif' -version = '2.3.8' - -homepage = 'https://motif.ics.com/' -description = """Motif refers to both a graphical user interface (GUI) specification and the widget toolkit for building - applications that follow that specification under the X Window System on Unix and other POSIX-compliant systems. - It was the standard toolkit for the Common Desktop Environment and thus for Unix. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [SOURCEFORGE_SOURCE] - -dependencies = [ - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('X11', '20200222'), -] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - ('Autotools', '20200321'), - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('util-linux', '2.36'), -] - -# makefile is not parallel safe -parallel = 1 - -sanity_check_paths = { - 'files': ['lib/libMrm.a', 'lib/libUil.a', 'lib/libXm.a', 'bin/mwm', 'bin/uil', 'bin/xmbind'], - 'dirs': ['include/Mrm', 'include/uil', 'include/X11', 'include/Xm'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gompi-2020-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gompi-2020-Python-3.8.5.eb deleted file mode 100644 index 425ebec809240c7feda580755bb9ef23074e1748..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gompi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gompi-2021-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gompi-2021-Python-3.8.5.eb deleted file mode 100644 index cea2736dc391deebbc621b9e263441adf57a43a0..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index e04c189589980d5b857b4a198c50befbe1421a84..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 0fff75cf044006147d7d85156f029dc12f6cbe9d..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iimpi-2020-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iimpi-2020-Python-3.8.5.eb deleted file mode 100644 index 403d9b55a92573b407c346d59092cbc5d3d4ff38..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iimpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iimpi-2021-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iimpi-2021-Python-3.8.5.eb deleted file mode 100644 index 60a94ce3d7d36f72748951d44b9b72cfe0f6108d..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iimpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iompi-2020-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iompi-2020-Python-3.8.5.eb deleted file mode 100644 index a047689c7a499148c6dfdb36da3811e0e47867c1..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iompi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iompi-2021-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iompi-2021-Python-3.8.5.eb deleted file mode 100644 index b4e4b538492474fb7f9ddc755e0f43b0a47f62c7..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-iompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2020-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index f0bc06463a35f6de9a2d5e41a9b1cdac21b312a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2020-mt-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2020-mt-Python-3.8.5.eb deleted file mode 100644 index 9aa9df5da18424443a1c83f9f11fc808e73686c6..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2020-mt-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2021-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 69f9666c9747d9d9ff6a6dbf7ba4aab142756da6..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-ipsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2020-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2020-Python-3.8.5.eb deleted file mode 100644 index 3b89f46699fafe978a55bfac0d479dafd6249cce..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2020-Python-3.8.5.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -prebuildopts = 'CFLAGS=-noswitcherror' - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2020.1-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2020.1-Python-3.8.5.eb deleted file mode 100644 index 7122207d4331e9724b800927a7b21129c560cef1..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2020.1-Python-3.8.5.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -prebuildopts = 'CFLAGS=-noswitcherror' - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2021-Python-3.8.5.eb b/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2021-Python-3.8.5.eb deleted file mode 100644 index ecfc94ba5b51e13c278bba1d81dd09963b3e73c1..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpi4py/mpi4py-3.0.3-npsmpic-2021-Python-3.8.5.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'mpi4py' -version = '3.0.3' - -homepage = 'https://bitbucket.org/mpi4py/mpi4py' -description = """MPI for Python (mpi4py) provides bindings of the Message Passing Interface (MPI) standard for - the Python programming language, allowing any Python program to exploit multiple processors. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} - -source_urls = [BITBUCKET_DOWNLOADS] -sources = [SOURCE_TAR_GZ] - -versionsuffix = '-Python-%(pyver)s' - -dependencies = [('Python', '3.8.5')] - -prebuildopts = 'CFLAGS=-noswitcherror' - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/mpi4py'], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/m/mpiP/mpiP-3.5-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/m/mpiP/mpiP-3.5-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index c3d55372f780e7790596e6515c33a8d9fa7f8ca3..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/mpiP/mpiP-3.5-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'ConfigureMake' -version = '3.5' - -homepage = 'https://github.com/LLNL/mpiP' -name = "mpiP" - -description = """mpiP is a lightweight profiling library for MPI applications. Because it only collects statistical -information about MPI functions, mpiP generates considerably less overhead and much less data than tracing tools. All -the information captured by mpiP is task-local. It only uses communication during report generation, typically at the -end of the experiment, to merge results from all of the tasks into one output file. 
-""" - -usage = """ - Example usage (take special note of the order, the mpiP library has to appear AFTER your code): - - mpifort -g -o mpitest mpitest.f90 -lmpiP -lm -lbfd -liberty -lunwind -lz -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/LLNL/mpiP/releases/download/%(version)s/'] -sources = [SOURCELOWER_TGZ] - -builddependencies = [ - ('Python', '3.8.5'), -] - -dependencies = [ - ('libunwind', '1.4.0'), -] - -configopts = "--with-cc=$CC --with-cxx=$CXX --with-f77=$F77 CFLAGS='-DPACKAGE=mpiP -DPACKAGE_VERSION=3.5' " - -buildopts = "PACKAGE='mpiP' PACKAGE_VERSION='3.5'" - -sanity_check_paths = { - 'files': ['lib/libmpiP.so'], - 'dirs': ['lib', 'share'] -} - -moduleclass = 'perf' diff --git a/Golden_Repo/m/muparserx/muparserx-4.0.8-GCCcore-10.3.0.eb b/Golden_Repo/m/muparserx/muparserx-4.0.8-GCCcore-10.3.0.eb deleted file mode 100644 index 4cc7181eb898b87fb59c08d43e32b56ae220a9e1..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/muparserx/muparserx-4.0.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'muparserx' -version = '4.0.8' - -homepage = "https://github.com/beltoforion/muparserx" -description = """A C++ math parser library with array and string support. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/beltoforion/muparserx/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('binutils', '2.36.1') -] - -sanity_check_paths = { - 'files': [], - 'dirs': [('include', 'lib64')] -} diff --git a/Golden_Repo/m/muparserx/muparserx-4.0.8-GCCcore-9.3.0.eb b/Golden_Repo/m/muparserx/muparserx-4.0.8-GCCcore-9.3.0.eb deleted file mode 100644 index dc018600889918cc69fa06356f921ea54aba1b04..0000000000000000000000000000000000000000 --- a/Golden_Repo/m/muparserx/muparserx-4.0.8-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'muparserx' -version = '4.0.8' - -homepage = "https://github.com/beltoforion/muparserx" -description = """A C++ math parser library with array and string support. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/beltoforion/muparserx/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34') -] - -sanity_check_paths = { - 'files': [], - 'dirs': [('include', 'lib64')] -} diff --git a/Golden_Repo/n/NAMD/NAMD-2.14-gompi-2020.eb b/Golden_Repo/n/NAMD/NAMD-2.14-gompi-2020.eb deleted file mode 100644 index 82ebce2df477a4d66ff87d9cf3d598b816371cab..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NAMD/NAMD-2.14-gompi-2020.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'NAMD' -version = '2.14' - -homepage = 'http://www.ks.uiuc.edu/Research/namd/' -description = """NAMD is a parallel molecular dynamics code designed for high-performance simulation of large -biomolecular systems. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True, 'cstd': 'c++11'} - -runtest = False - -sources = ['NAMD_%(version)s_Source.tar.gz'] - -patches = ['namd-2.14-fix-gnu-casting.patch'] - -group = "namd" - -dependencies = [ - ('Tcl', '8.6.10'), - ('FFTW', '3.3.8'), -] - -charm_arch = 'mpi-linux-x86_64' - -namd_cfg_opts = " --with-tcl --tcl-prefix $EBROOTTCL " - -prebuildopts = 'echo "TCLLIB=-L\$(TCLDIR)/lib -ltcl8.6 -ldl -lpthread" >> Make.config && ' - -# In JURECA we should use srun. charmrun results in serial executions -postinstallcmds = ['rm %(installdir)s/charmrun'] - -# We must overwrite the default sanity check, otherwise if fails because it can't find charmrun -sanity_check_paths = { - 'files': ['flipbinpdb', 'flipdcd', 'namd%s' % version.split('.')[0], 'psfgen'], - 'dirs': ['inc'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/n/NAMD/NAMD-2.14-gpsmpi-2020.eb b/Golden_Repo/n/NAMD/NAMD-2.14-gpsmpi-2020.eb deleted file mode 100644 index 8e0c56d1b9d6cee0fc1566817830299edfb6dc47..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NAMD/NAMD-2.14-gpsmpi-2020.eb +++ /dev/null @@ -1,42 +0,0 @@ -name = 'NAMD' -version = '2.14' - -homepage = 'http://www.ks.uiuc.edu/Research/namd/' -description = """NAMD is a parallel molecular dynamics code designed for high-performance simulation of large -biomolecular systems. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True, 'cstd': 'c++11'} - -runtest = False - -sources = ['NAMD_%(version)s_Source.tar.gz'] - -patches = ['namd-2.14-fix-gnu-casting.patch'] - -group = "namd" - -dependencies = [ - ('Tcl', '8.6.10'), - ('FFTW', '3.3.8'), -] - -charm_arch = 'mpi-linux-x86_64' - -namd_cfg_opts = " --with-tcl --tcl-prefix $EBROOTTCL " - -prebuildopts = 'echo "TCLLIB=-L\$(TCLDIR)/lib -ltcl8.6 -ldl -lpthread" >> Make.config && ' - -# In JURECA we should use srun. charmrun results in serial executions -postinstallcmds = ['rm %(installdir)s/charmrun'] - -# We must overwrite the default sanity check, otherwise if fails because it can't find charmrun -sanity_check_paths = { - 'files': ['flipbinpdb', 'flipdcd', 'namd%s' % version.split('.')[0], 'psfgen'], - 'dirs': ['inc'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/n/NAMD/NAMD-2.14-iimpi-2020.eb b/Golden_Repo/n/NAMD/NAMD-2.14-iimpi-2020.eb deleted file mode 100644 index 9816e20a09011d974c9cabcd4c4421875074aede..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NAMD/NAMD-2.14-iimpi-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'NAMD' -version = '2.14' - -homepage = 'http://www.ks.uiuc.edu/Research/namd/' -description = """NAMD is a parallel molecular dynamics code designed for high-performance simulation of large -biomolecular systems. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True, 'cstd': 'c++11'} - -runtest = False - -sources = ['NAMD_%(version)s_Source.tar.gz'] - -patches = ['namd-2.14-fix-gnu-casting.patch'] - -group = "namd" - -dependencies = [ - ('Tcl', '8.6.10'), - ('FFTW', '3.3.8'), -] - -charm_arch = 'mpi-linux-x86_64' - -namd_cfg_opts = " --with-tcl --tcl-prefix $EBROOTTCL " - -prebuildopts = 'echo "TCLLIB=-L\$(TCLDIR)/lib -ltcl8.6 -ldl -lpthread" >> Make.config && ' -prebuildopts += 'echo "COPTS+=-DNAMD_DISABLE_SSE" >> Make.config && ' -prebuildopts += 'echo "CXXOPTS+=-DNAMD_DISABLE_SSE" >> Make.config && ' - -# In JURECA we should use srun. charmrun results in serial executions -postinstallcmds = ['rm %(installdir)s/charmrun'] - -# We must overwrite the default sanity check, otherwise if fails because it can't find charmrun -sanity_check_paths = { - 'files': ['flipbinpdb', 'flipdcd', 'namd%s' % version.split('.')[0], 'psfgen'], - 'dirs': ['inc'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/n/NAMD/NAMD-2.14-iompi-2029.eb b/Golden_Repo/n/NAMD/NAMD-2.14-iompi-2029.eb deleted file mode 100644 index c591e99c50db679d161a4a9ff8af57bcb42967ab..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NAMD/NAMD-2.14-iompi-2029.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'NAMD' -version = '2.14' - -homepage = 'http://www.ks.uiuc.edu/Research/namd/' -description = """NAMD is a parallel molecular dynamics code designed for high-performance simulation of large -biomolecular systems. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True, 'cstd': 'c++11'} - -runtest = False - -sources = ['NAMD_%(version)s_Source.tar.gz'] - -patches = ['namd-2.14-fix-gnu-casting.patch'] - -group = "namd" - -dependencies = [ - ('Tcl', '8.6.10'), - ('FFTW', '3.3.8'), -] - -charm_arch = 'mpi-linux-x86_64' - -namd_cfg_opts = " --with-tcl --tcl-prefix $EBROOTTCL " - -prebuildopts = 'echo "TCLLIB=-L\$(TCLDIR)/lib -ltcl8.6 -ldl -lpthread" >> Make.config && ' -prebuildopts += 'echo "COPTS+=-DNAMD_DISABLE_SSE" >> Make.config && ' -prebuildopts += 'echo "CXXOPTS+=-DNAMD_DISABLE_SSE" >> Make.config && ' - -# In JURECA we should use srun. charmrun results in serial executions -postinstallcmds = ['rm %(installdir)s/charmrun'] - -# We must overwrite the default sanity check, otherwise if fails because it can't find charmrun -sanity_check_paths = { - 'files': ['flipbinpdb', 'flipdcd', 'namd%s' % version.split('.')[0], 'psfgen'], - 'dirs': ['inc'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/n/NAMD/NAMD-2.14-ipsmpi-2020.eb b/Golden_Repo/n/NAMD/NAMD-2.14-ipsmpi-2020.eb deleted file mode 100644 index 3f3f6be44a31f4c6ae82644ba1926e3e2e9e3585..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NAMD/NAMD-2.14-ipsmpi-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'NAMD' -version = '2.14' - -homepage = 'http://www.ks.uiuc.edu/Research/namd/' -description = """NAMD is a parallel molecular dynamics code designed for high-performance simulation of large -biomolecular systems. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True, 'cstd': 'c++11'} - -runtest = False - -sources = ['NAMD_%(version)s_Source.tar.gz'] - -patches = ['namd-2.14-fix-gnu-casting.patch'] - -group = "namd" - -dependencies = [ - ('Tcl', '8.6.10'), - ('FFTW', '3.3.8'), -] - -charm_arch = 'mpi-linux-x86_64' - -namd_cfg_opts = " --with-tcl --tcl-prefix $EBROOTTCL " - -prebuildopts = 'echo "TCLLIB=-L\$(TCLDIR)/lib -ltcl8.6 -ldl -lpthread" >> Make.config && ' -prebuildopts += 'echo "COPTS+=-DNAMD_DISABLE_SSE" >> Make.config && ' -prebuildopts += 'echo "CXXOPTS+=-DNAMD_DISABLE_SSE" >> Make.config && ' - -# In JURECA we should use srun. charmrun results in serial executions -postinstallcmds = ['rm %(installdir)s/charmrun'] - -# We must overwrite the default sanity check, otherwise if fails because it can't find charmrun -sanity_check_paths = { - 'files': ['flipbinpdb', 'flipdcd', 'namd%s' % version.split('.')[0], 'psfgen'], - 'dirs': ['inc'], -} - -moduleclass = 'chem' diff --git a/Golden_Repo/n/NAMD/namd-2.14-fix-gnu-casting.patch b/Golden_Repo/n/NAMD/namd-2.14-fix-gnu-casting.patch deleted file mode 100644 index f0716420d533e812379070ec754870a9cf932bbd..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NAMD/namd-2.14-fix-gnu-casting.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- charm-6.10.2/src/conv-core/memoryaffinity.C.orig 2020-10-20 16:01:45.475695293 +0200 -+++ charm-6.10.2/src/conv-core/memoryaffinity.C 2020-10-20 16:02:19.481016605 +0200 -@@ -182,7 +182,7 @@ - for (i=0; i<strlen((const char *)nodemap); i++) { - if (nodemap[i]==',') nodemapArrSize++; - } -- nodemapArr = malloc(nodemapArrSize*sizeof(int)); -+ nodemapArr = (int*)malloc(nodemapArrSize*sizeof(int)); - prevIntStart=j=0; - for (i=0; i<strlen((const char *)nodemap); i++) { - if (nodemap[i]==',') { diff --git a/Golden_Repo/n/NASM/NASM-2.15.03-GCCcore-10.3.0.eb b/Golden_Repo/n/NASM/NASM-2.15.03-GCCcore-10.3.0.eb deleted file mode 100644 index 3ce99d94599b9e2d43a30402164dd85257768541..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NASM/NASM-2.15.03-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-90.html -## - -easyblock = 'ConfigureMake' - -name = 'NASM' -version = '2.15.03' - -homepage = 'http://www.nasm.us/' -description = """NASM: General-purpose x86 assembler -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_BZ2] -source_urls = ['http://www.nasm.us/pub/nasm/releasebuilds/%(version)s'] - -builddependencies = [('binutils', '2.36.1')] - -sanity_check_paths = { - 'files': ['bin/nasm'], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/n/NASM/NASM-2.15.03-GCCcore-9.3.0.eb b/Golden_Repo/n/NASM/NASM-2.15.03-GCCcore-9.3.0.eb deleted file mode 100644 index 01d3799829fdd5617ca282b0c01fea72b226e845..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NASM/NASM-2.15.03-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, 
NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-90.html -## - -easyblock = 'ConfigureMake' - -name = 'NASM' -version = '2.15.03' - -homepage = 'http://www.nasm.us/' -description = """NASM: General-purpose x86 assembler -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_BZ2] -source_urls = ['http://www.nasm.us/pub/nasm/releasebuilds/%(version)s'] - -builddependencies = [('binutils', '2.34')] - -sanity_check_paths = { - 'files': ['bin/nasm'], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/n/NCCL/NCCL-2.8.3-1-GCCcore-10.3.0-CUDA-11.3.eb b/Golden_Repo/n/NCCL/NCCL-2.8.3-1-GCCcore-10.3.0-CUDA-11.3.eb deleted file mode 100644 index d560367de13de9300eed0c93ceb6215a931c6bfe..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCCL/NCCL-2.8.3-1-GCCcore-10.3.0-CUDA-11.3.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'NCCL' -version = '2.8.3-1' -local_cudaver = '11.3' -versionsuffix = '-CUDA-%s' % local_cudaver - -homepage = 'https://developer.nvidia.com/nccl' -description = """The NVIDIA Collective Communications Library (NCCL) implements multi-GPU and multi-node collective -communication primitives that are performance optimized for NVIDIA GPUs.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/NVIDIA/nccl/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('CUDA', local_cudaver, '', SYSTEM) -] - -skipsteps = ['configure'] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - -installopts = 'PREFIX=%(installdir)s' - -sanity_check_paths = { - 'files': ['lib/libnccl.%s' % SHLIB_EXT, 'lib/libnccl_static.a', 'include/nccl.h'], - 'dirs': ['include'], -} - -# Add a property -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'system' diff --git a/Golden_Repo/n/NCCL/NCCL-2.8.3-1-GCCcore-9.3.0-CUDA-11.0.eb b/Golden_Repo/n/NCCL/NCCL-2.8.3-1-GCCcore-9.3.0-CUDA-11.0.eb deleted file mode 100644 index 2131750ddfde97861e694562ffd5726084cc76dd..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCCL/NCCL-2.8.3-1-GCCcore-9.3.0-CUDA-11.0.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'NCCL' -version = '2.8.3-1' -local_cudaver = '11.0' -versionsuffix = '-CUDA-%s' % local_cudaver - -homepage = 'https://developer.nvidia.com/nccl' -description = """The NVIDIA Collective Communications Library (NCCL) implements multi-GPU and multi-node collective -communication primitives that are performance optimized for NVIDIA GPUs.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/NVIDIA/nccl/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('CUDA', local_cudaver, '', SYSTEM) -] - -skipsteps = ['configure'] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - -installopts = 'PREFIX=%(installdir)s' - -sanity_check_paths = { - 'files': ['lib/libnccl.%s' % SHLIB_EXT, 
'lib/libnccl_static.a', 'include/nccl.h'], - 'dirs': ['include'], -} - -# Add a property -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'system' diff --git a/Golden_Repo/n/NCL/NCL-6.4.0_fix-types.patch b/Golden_Repo/n/NCL/NCL-6.4.0_fix-types.patch deleted file mode 100644 index ecdfe831e0e64bf022ecdaa0dba00e4e32a80f4e..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCL/NCL-6.4.0_fix-types.patch +++ /dev/null @@ -1,94 +0,0 @@ -fix compilation problems due to incorrect type -author: Kenneth Hoste (HPC-UGent) ---- ni/src/lib/nfp/SCRIP2KMLW.c.orig 2017-04-21 11:22:11.523861897 +0200 -+++ ni/src/lib/nfp/SCRIP2KMLW.c 2017-04-21 11:22:17.773923222 +0200 -@@ -8,17 +8,17 @@ - NhlErrorTypes SCRIP2KML_W(void) { - /* Defining the Arguments */ - /* Argument # 0 */ -- string * scrip_filename; -+ NclQuark* scrip_filename; - char * c_scrip_filename; - int ncid; - - /* Argument # 1 */ -- string * kml_filename; -+ NclQuark* kml_filename; - char * c_kml_filename; - FILE * fid; - - /* Argument # 2 */ -- string * gridname; -+ NclQuark* gridname; - char * c_gridname; - - /* End of Defining the Arguments */ -@@ -48,7 +48,7 @@ - - /* Getting Arguments values */ - /* Argument # 0 */ -- scrip_filename = (string *) NclGetArgValue( -+ scrip_filename = (NclQuark*) NclGetArgValue( - 0, - 3, - NULL, -@@ -61,7 +61,7 @@ - c_scrip_filename = NrmQuarkToString(*scrip_filename); - - /* Argument # 1 */ -- kml_filename = (string *) NclGetArgValue( -+ kml_filename = (NclQuark*) NclGetArgValue( - 1, - 3, - NULL, -@@ -74,7 +74,7 @@ - c_kml_filename = NrmQuarkToString(*kml_filename); - - /* Argument # 1 */ -- gridname = (string *) NclGetArgValue( -+ gridname = (NclQuark*) NclGetArgValue( - 2, - 3, - NULL, -@@ -212,4 +212,4 @@ - free(grid_corner_lon); - fclose(fid); - return (NhlNOERROR); --} -\ No newline at end of file -+} ---- ni/src/lib/nfp/Unstruct2KMLW.c.orig 2017-04-21 11:20:50.403067496 +0200 -+++ ni/src/lib/nfp/Unstruct2KMLW.c 2017-04-21 11:21:34.883504667 +0200 -@@ -7,12 +7,12 @@ - NhlErrorTypes Unstruct2KML_W(void) { - /* Defining the Arguments */ - /* Argument # 0 */ -- string * filename; -+ NclQuark* filename; - char * c_filename; - FILE * fid; - - /* Argument # 1 */ -- string * gridname; -+ NclQuark* gridname; - char * c_gridname; - - /* Argument # 2 */ -@@ -50,7 +50,7 @@ - - /* Getting Arguments values */ - /* Argument # 0 */ -- filename = (string *) NclGetArgValue( -+ filename = (NclQuark*) NclGetArgValue( - 0, - 6, - NULL, -@@ -63,7 +63,7 @@ - c_filename = NrmQuarkToString(*filename); - - /* Argument # 1 */ -- gridname = (string *) NclGetArgValue( -+ gridname = (NclQuark*) NclGetArgValue( - 1, - 6, - NULL, diff --git a/Golden_Repo/n/NCL/NCL-6.6.2-gpsmkl-2020.eb b/Golden_Repo/n/NCL/NCL-6.6.2-gpsmkl-2020.eb deleted file mode 100644 index eb6205ac1bd944bfd34ee6a042b8f5e21d82a957..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCL/NCL-6.6.2-gpsmkl-2020.eb +++ /dev/null @@ -1,40 +0,0 @@ -name = 'NCL' -version = "6.6.2" - -homepage = 'http://www.ncl.ucar.edu' -description = 'NCL is an interpreted language designed specifically for scientific data analysis and visualization.' 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'cstd': 'c99', 'openmp': True, 'pic': True} - -source_urls = ['https://github.com/NCAR/ncl/archive'] -sources = ['%(version)s.tar.gz'] - -patches = ['NCL-6.4.0_fix-types.patch'] - -dependencies = [ - ('cURL', '7.71.1'), - ('libdap', '3.20.6'), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('freetype', '2.10.1'), - ('JasPer', '2.0.19'), - ('g2lib', '3.1.0'), - ('zlib', '1.2.11'), - ('ESMF', '8.0.1'), - ('bzip2', '1.0.8'), - ('GSL', '2.6'), - ('GDAL', '3.1.2', '-Python-3.8.5'), - ('HDF5', '1.10.6'), - ('HDF', '4.2.15'), - ('g2clib', '1.6.0'), - ('Szip', '2.1.1'), - ('UDUNITS', '2.2.26'), - ('cairo', '1.17.2'), - ('Bison', '3.6.4'), - ('flex', '2.6.4'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/n/NCL/NCL-6.6.2-intel-para-2020.eb b/Golden_Repo/n/NCL/NCL-6.6.2-intel-para-2020.eb deleted file mode 100644 index cfbc3cda68dd5412b5fd0350cb5873c5535b2e12..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCL/NCL-6.6.2-intel-para-2020.eb +++ /dev/null @@ -1,40 +0,0 @@ -name = 'NCL' -version = "6.6.2" - -homepage = 'http://www.ncl.ucar.edu' -description = 'NCL is an interpreted language designed specifically for scientific data analysis and visualization.' - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'cstd': 'c99', 'openmp': True, 'pic': True} - -source_urls = ['https://github.com/NCAR/ncl/archive'] -sources = ['%(version)s.tar.gz'] - -patches = ['NCL-6.4.0_fix-types.patch'] - -dependencies = [ - ('cURL', '7.71.1'), - ('libdap', '3.20.6'), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('freetype', '2.10.1'), - ('JasPer', '2.0.19'), - ('g2lib', '3.1.0'), - ('zlib', '1.2.11'), - ('ESMF', '8.0.1'), - ('bzip2', '1.0.8'), - ('GSL', '2.6'), - ('GDAL', '3.1.2', '-Python-3.8.5'), - ('HDF5', '1.10.6'), - ('HDF', '4.2.15'), - ('g2clib', '1.6.0'), - ('Szip', '2.1.1'), - ('UDUNITS', '2.2.26'), - ('cairo', '1.17.2'), - ('Bison', '3.6.4'), - ('flex', '2.6.4'), -] - -moduleclass = 'data' diff --git a/Golden_Repo/n/NCO/NCO-4.9.5-gpsmpi-2020.eb b/Golden_Repo/n/NCO/NCO-4.9.5-gpsmpi-2020.eb deleted file mode 100644 index 18b667fad77c0277ebb679667c7b6c6b9906b115..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCO/NCO-4.9.5-gpsmpi-2020.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'NCO' -version = '4.9.5' - -homepage = 'http://nco.sourceforge.net' -description = """NCO manipulates data stored in netCDF-accessible formats, - including HDF4 and HDF5. It also exploits the geophysical expressivity of many - CF (Climate & Forecast) metadata conventions, the flexible description of - physical dimensions translated by UDUnits, the network transparency of OPeNDAP, - the storage features (e.g., compression, chunking, groups) of HDF (the - Hierarchical Data Format), and many powerful mathematical and statistical - algorithms of GSL (the GNU Scientific Library). 
-""" - -site_contacts = 'a.ghasemi@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True} - -configopts = '--enable-optimize-custom --enable-debug-custom ' - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://nco.sourceforge.net/src/'] - -patches = ['nco_4.9.5_yyget_gcc9.patch'] - -builddependencies = [ - ('Bison', '3.6.4'), - ('flex', '2.6.4'), -] - -dependencies = [ - ('cURL', '7.71.1'), - ('netCDF', '4.7.4'), - ('UDUNITS', '2.2.26'), - ('GSL', '2.6'), - ('expat', '2.2.9'), - # ANTLR 3.X doesn't have C++ support and therefore is not usable - # ANTLR 4.X has C++ support, but right now is not compatible with NCO - # See https://sourceforge.net/p/nco/discussion/9831/thread/1a424aac/ - ('ANTLR', '2.7.7', '-Python-3.8.5'), - ('libdap', '3.20.6'), -] - -sanity_check_paths = { - 'files': ["bin/ncks", "bin/ncrename"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/NCO/NCO-4.9.5-ipsmpi-2020.eb b/Golden_Repo/n/NCO/NCO-4.9.5-ipsmpi-2020.eb deleted file mode 100644 index ee27be10cd38066eaa960d4768939a84207cce1d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCO/NCO-4.9.5-ipsmpi-2020.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'NCO' -version = '4.9.5' - -homepage = 'http://nco.sourceforge.net' -description = """NCO manipulates data stored in netCDF-accessible formats, - including HDF4 and HDF5. It also exploits the geophysical expressivity of many - CF (Climate & Forecast) metadata conventions, the flexible description of - physical dimensions translated by UDUnits, the network transparency of OPeNDAP, - the storage features (e.g., compression, chunking, groups) of HDF (the - Hierarchical Data Format), and many powerful mathematical and statistical - algorithms of GSL (the GNU Scientific Library). 
-""" - -site_contacts = 'a.ghasemi@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True} - -configopts = '--enable-optimize-custom --enable-debug-custom ' - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://nco.sourceforge.net/src/'] - -patches = ['nco_4.9.5_yyget_gcc9.patch'] - -builddependencies = [ - ('Bison', '3.6.4'), - ('flex', '2.6.4'), -] - -dependencies = [ - ('cURL', '7.71.1'), - ('netCDF', '4.7.4'), - ('UDUNITS', '2.2.26'), - ('GSL', '2.6'), - ('expat', '2.2.9'), - # ANTLR 3.X doesn't have C++ support and therefore is not usable - # ANTLR 4.X has C++ support, but right now is not compatible with NCO - # See https://sourceforge.net/p/nco/discussion/9831/thread/1a424aac/ - ('ANTLR', '2.7.7', '-Python-3.8.5'), - ('libdap', '3.20.6'), -] - -sanity_check_paths = { - 'files': ["bin/ncks", "bin/ncrename"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/NCO/nco_4.9.5_yyget_gcc9.patch b/Golden_Repo/n/NCO/nco_4.9.5_yyget_gcc9.patch deleted file mode 100644 index 6c62faa6a24e89651ccffb86b6fc70c1be1e8fe9..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NCO/nco_4.9.5_yyget_gcc9.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ruN nco-4.9.5.orig/src/nco/ncap_lex.l nco-4.9.5/src/nco/ncap_lex.l ---- nco-4.9.5.orig/src/nco/ncap_lex.l 2018-09-21 17:23:39.000000000 +0200 -+++ nco-4.9.5/src/nco/ncap_lex.l 2018-11-09 16:43:31.201995148 +0100 -@@ -107,7 +107,7 @@ - Once all platforms upgrade to Flex >= 2.6.4 we can deprecate yy_size_t code - However, MacOSX may never upgrade to newer Flex - CZ's decision is to keep current behavior until breakage cannot be ignored and then sacrifice buildability on older Flex */ -- yy_size_t yyget_leng(void); /* fixes: warning: no previous prototype for `yyget_leng' */ -+ int yyget_leng(void); /* fixes: warning: no previous prototype for `yyget_leng' */ - int yyget_lineno(void); /* fixes: warning: no previous prototype for `yyget_lineno' */ - int yylex_destroy(void); /* fixes: warning: no previous prototype for `yylex_destroy' */ - void yyset_debug(int bdebug); /* fixes: warning: no previous prototype for `yyset_debug' */ diff --git a/Golden_Repo/n/NLopt/NLopt-2.6.2-GCCcore-10.3.0.eb b/Golden_Repo/n/NLopt/NLopt-2.6.2-GCCcore-10.3.0.eb deleted file mode 100644 index 03bbbe758054c020ebbd5c017ff1de313792c588..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NLopt/NLopt-2.6.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# Author: Pablo Escobar Lopez -# Swiss Institute of Bioinformatics -# Biozentrum - University of Basel -# 2019-06-05 John Dey jfdey@fredhutch.org fizwit@github.com - updated for CMake -easyblock = 'CMakeMake' - -name = 'NLopt' -version = '2.6.2' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/NLopt' -description = """ NLopt is a free/open-source library for nonlinear -optimization, providing a common interface for a number of different free -optimization routines available online as well as original implementations -of various other algorithms. 
""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/stevengj/nlopt/archive'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('binutils', '2.36.1'), -] - -configopts = [ - '-DBUILD_SHARED_LIBS=ON', - '-DBUILD_SHARED_LIBS=OFF' -] - -sanity_check_paths = { - 'files': ['lib/libnlopt.a', 'lib/libnlopt.%s' % SHLIB_EXT, - 'include/nlopt.h'], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/n/NLopt/NLopt-2.6.2-GCCcore-9.3.0.eb b/Golden_Repo/n/NLopt/NLopt-2.6.2-GCCcore-9.3.0.eb deleted file mode 100644 index 4ef298984a4a4227fea29d98c2443719216405d4..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NLopt/NLopt-2.6.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# Author: Pablo Escobar Lopez -# Swiss Institute of Bioinformatics -# Biozentrum - University of Basel -# 2019-06-05 John Dey jfdey@fredhutch.org fizwit@github.com - updated for CMake -easyblock = 'CMakeMake' - -name = 'NLopt' -version = '2.6.2' - -homepage = 'http://ab-initio.mit.edu/wiki/index.php/NLopt' -description = """ NLopt is a free/open-source library for nonlinear -optimization, providing a common interface for a number of different free -optimization routines available online as well as original implementations -of various other algorithms. """ - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/stevengj/nlopt/archive'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34'), -] - -configopts = [ - '-DBUILD_SHARED_LIBS=ON', - '-DBUILD_SHARED_LIBS=OFF' -] - -sanity_check_paths = { - 'files': ['lib/libnlopt.a', 'lib/libnlopt.%s' % SHLIB_EXT, - 'include/nlopt.h'], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/n/NSPR/NSPR-4.25-GCCcore-10.3.0.eb b/Golden_Repo/n/NSPR/NSPR-4.25-GCCcore-10.3.0.eb deleted file mode 100644 index bcdf97e6b604bdbde2cbb12380bed157685fb27a..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NSPR/NSPR-4.25-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'NSPR' -version = '4.25' - -homepage = 'https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSPR' -description = """Netscape Portable Runtime (NSPR) provides a platform-neutral API for system level - and libc-like functions.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://archive.mozilla.org/pub/nspr/releases/v%(version)s/src/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['0bc309be21f91da4474c56df90415101c7f0c7c7cab2943cd943cd7896985256'] - -builddependencies = [('binutils', '2.36.1')] - -configopts = "--disable-debug --enable-optimize --enable-64bit" - -sanity_check_paths = { - 'files': ['bin/nspr-config', 'lib/libnspr%(version_major)s.a', 'lib/libnspr%%(version_major)s.%s' % SHLIB_EXT, - 'lib/libplc%(version_major)s.a', 'lib/libplc%%(version_major)s.%s' % SHLIB_EXT, - 'lib/libplds%(version_major)s.a', 'lib/libplds%%(version_major)s.%s' % SHLIB_EXT, - 'lib/pkgconfig/nspr.pc'], - 'dirs': ['include/nspr'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/n/NSPR/NSPR-4.25-GCCcore-9.3.0.eb 
b/Golden_Repo/n/NSPR/NSPR-4.25-GCCcore-9.3.0.eb deleted file mode 100644 index f2768129a9b52ec6e50b83f274dbab7405d7842e..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NSPR/NSPR-4.25-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'NSPR' -version = '4.25' - -homepage = 'https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSPR' -description = """Netscape Portable Runtime (NSPR) provides a platform-neutral API for system level - and libc-like functions.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://archive.mozilla.org/pub/nspr/releases/v%(version)s/src/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['0bc309be21f91da4474c56df90415101c7f0c7c7cab2943cd943cd7896985256'] - -builddependencies = [('binutils', '2.34')] - -configopts = "--disable-debug --enable-optimize --enable-64bit" - -sanity_check_paths = { - 'files': ['bin/nspr-config', 'lib/libnspr%(version_major)s.a', 'lib/libnspr%%(version_major)s.%s' % SHLIB_EXT, - 'lib/libplc%(version_major)s.a', 'lib/libplc%%(version_major)s.%s' % SHLIB_EXT, - 'lib/libplds%(version_major)s.a', 'lib/libplds%%(version_major)s.%s' % SHLIB_EXT, - 'lib/pkgconfig/nspr.pc'], - 'dirs': ['include/nspr'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/n/NSS/NSS-3.39_pkgconfig.patch b/Golden_Repo/n/NSS/NSS-3.39_pkgconfig.patch deleted file mode 100644 index 827396691769d412b10f2518321ed8b9627f0c41..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NSS/NSS-3.39_pkgconfig.patch +++ /dev/null @@ -1,221 +0,0 @@ -based on https://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/dev-libs/nss/files/nss-3.12.4-gentoo-fixups-1.diff - -updated/fixed for NSS 3.39 by Kenneth Hoste (HPC-UGent) - -diff -urN nss/manifest.mn nss-3.12.4/mozilla/security/nss/manifest.mn ---- nss/manifest.mn 2008-04-04 15:36:59.000000000 -0500 -+++ nss/manifest.mn 2009-09-14 21:45:45.703656167 -0500 - - RELEASE = nss - --DIRS = coreconf lib cmd cpputil gtests -+DIRS = coreconf lib cmd cpputil gtests config -diff -urN nss/config/Makefile nss-3.12.4/mozilla/security/nss/config/Makefile ---- nss/config/Makefile 1969-12-31 18:00:00.000000000 -0600 -+++ nss/config/Makefile 2009-09-14 21:45:45.619639265 -0500 -@@ -0,0 +1,40 @@ -+CORE_DEPTH = .. -+DEPTH = ../.. 
-+ -+include $(CORE_DEPTH)/coreconf/config.mk -+ -+NSS_MAJOR_VERSION = `grep "NSS_VMAJOR" ../lib/nss/nss.h | awk '{print $$3}'` -+NSS_MINOR_VERSION = `grep "NSS_VMINOR" ../lib/nss/nss.h | awk '{print $$3}'` -+NSS_PATCH_VERSION = `grep "NSS_VPATCH" ../lib/nss/nss.h | awk '{print $$3}'` -+PREFIX = /usr -+ -+all: export libs -+ -+export: -+ # Create the nss.pc file -+ mkdir -p $(DIST)/lib/pkgconfig -+ sed -e "s,@prefix@,$(PREFIX)," \ -+ -e "s,@exec_prefix@,\$${prefix}/bin," \ -+ -e "s,@libdir@,\$${prefix}/lib," \ -+ -e "s,@includedir@,\$${prefix}/include/nss," \ -+ -e "s,@NSS_MAJOR_VERSION@,$(NSS_MAJOR_VERSION),g" \ -+ -e "s,@NSS_MINOR_VERSION@,$(NSS_MINOR_VERSION)," \ -+ -e "s,@NSS_PATCH_VERSION@,$(NSS_PATCH_VERSION)," \ -+ nss.pc.in > nss.pc -+ chmod 0644 nss.pc -+ ln -sf ../../../../nss/config/nss.pc $(DIST)/lib/pkgconfig -+ -+ # Create the nss-config script -+ mkdir -p $(DIST)/bin -+ sed -e "s,@prefix@,$(PREFIX)," \ -+ -e "s,@NSS_MAJOR_VERSION@,$(NSS_MAJOR_VERSION)," \ -+ -e "s,@NSS_MINOR_VERSION@,$(NSS_MINOR_VERSION)," \ -+ -e "s,@NSS_PATCH_VERSION@,$(NSS_PATCH_VERSION)," \ -+ nss-config.in > nss-config -+ chmod 0755 nss-config -+ ln -sf ../../../nss/config/nss-config $(DIST)/bin -+ -+libs: -+ -+dummy: all export libs -+ -diff -urN nss/config/nss-config.in nss-3.12.4/mozilla/security/nss/config/nss-config.in ---- nss/config/nss-config.in 1969-12-31 18:00:00.000000000 -0600 -+++ nss/config/nss-config.in 2009-09-14 21:47:45.190638078 -0500 -@@ -0,0 +1,145 @@ -+#!/bin/sh -+ -+prefix=@prefix@ -+ -+major_version=@NSS_MAJOR_VERSION@ -+minor_version=@NSS_MINOR_VERSION@ -+patch_version=@NSS_PATCH_VERSION@ -+ -+usage() -+{ -+ cat <<EOF -+Usage: nss-config [OPTIONS] [LIBRARIES] -+Options: -+ [--prefix[=DIR]] -+ [--exec-prefix[=DIR]] -+ [--includedir[=DIR]] -+ [--libdir[=DIR]] -+ [--version] -+ [--libs] -+ [--cflags] -+Dynamic Libraries: -+ nss -+ ssl -+ smime -+ nssutil -+EOF -+ exit $1 -+} -+ -+if test $# -eq 0; then -+ usage 1 1>&2 -+fi -+ -+lib_ssl=yes -+lib_smime=yes -+lib_nss=yes -+lib_nssutil=yes -+ -+while test $# -gt 0; do -+ case "$1" in -+ -*=*) optarg=`echo "$1" | sed 's/[-_a-zA-Z0-9]*=//'` ;; -+ *) optarg= ;; -+ esac -+ -+ case $1 in -+ --prefix=*) -+ prefix=$optarg -+ ;; -+ --prefix) -+ echo_prefix=yes -+ ;; -+ --exec-prefix=*) -+ exec_prefix=$optarg -+ ;; -+ --exec-prefix) -+ echo_exec_prefix=yes -+ ;; -+ --includedir=*) -+ includedir=$optarg -+ ;; -+ --includedir) -+ echo_includedir=yes -+ ;; -+ --libdir=*) -+ libdir=$optarg -+ ;; -+ --libdir) -+ echo_libdir=yes -+ ;; -+ --version) -+ echo ${major_version}.${minor_version}.${patch_version} -+ ;; -+ --cflags) -+ echo_cflags=yes -+ ;; -+ --libs) -+ echo_libs=yes -+ ;; -+ ssl) -+ lib_ssl=yes -+ ;; -+ smime) -+ lib_smime=yes -+ ;; -+ nss) -+ lib_nss=yes -+ ;; -+ nssutil) -+ lib_nssutil=yes -+ ;; -+ *) -+ usage 1 1>&2 -+ ;; -+ esac -+ shift -+done -+ -+# Set variables that may be dependent upon other variables -+if test -z "$exec_prefix"; then -+ exec_prefix=`pkg-config --variable=exec_prefix nss` -+fi -+if test -z "$includedir"; then -+ includedir=`pkg-config --variable=includedir nss` -+fi -+if test -z "$libdir"; then -+ libdir=`pkg-config --variable=libdir nss` -+fi -+ -+if test "$echo_prefix" = "yes"; then -+ echo $prefix -+fi -+ -+if test "$echo_exec_prefix" = "yes"; then -+ echo $exec_prefix -+fi -+ -+if test "$echo_includedir" = "yes"; then -+ echo $includedir -+fi -+ -+if test "$echo_libdir" = "yes"; then -+ echo $libdir -+fi -+ -+if test "$echo_cflags" = "yes"; then -+ echo -I$includedir -+fi -+ -+if test "$echo_libs" = "yes"; 
then -+ libdirs="-Wl,-R$libdir -L$libdir" -+ if test -n "$lib_ssl"; then -+ libdirs="$libdirs -lssl${major_version}" -+ fi -+ if test -n "$lib_smime"; then -+ libdirs="$libdirs -lsmime${major_version}" -+ fi -+ if test -n "$lib_nss"; then -+ libdirs="$libdirs -lnss${major_version}" -+ fi -+ if test -n "$lib_nssutil"; then -+ libdirs="$libdirs -lnssutil${major_version}" -+ fi -+ echo $libdirs -+fi -+ -diff -urN nss/config/nss.pc.in nss-3.12.4/mozilla/security/nss/config/nss.pc.in ---- nss/config/nss.pc.in 1969-12-31 18:00:00.000000000 -0600 -+++ nss/config/nss.pc.in 2009-09-14 21:45:45.653637310 -0500 -@@ -0,0 +1,12 @@ -+prefix=@prefix@ -+exec_prefix=@exec_prefix@ -+libdir=@libdir@ -+includedir=@includedir@ -+ -+Name: NSS -+Description: Network Security Services -+Version: @NSS_MAJOR_VERSION@.@NSS_MINOR_VERSION@.@NSS_PATCH_VERSION@ -+Requires: nspr >= 4.8 -+Libs: -L${libdir} -lssl3 -lsmime3 -lnssutil3 -lnss3 -Wl,-R${libdir} -+Cflags: -I${includedir} -+ diff --git a/Golden_Repo/n/NSS/NSS-3.51-GCCcore-10.3.0.eb b/Golden_Repo/n/NSS/NSS-3.51-GCCcore-10.3.0.eb deleted file mode 100644 index 10cf9e4364750063a4cc52d062feed0ea4569fc4..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NSS/NSS-3.51-GCCcore-10.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'MakeCp' - -name = 'NSS' -version = '3.51' - -homepage = 'https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSS' -description = """Network Security Services (NSS) is a set of libraries designed to support cross-platform development - of security-enabled client and server applications.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://ftp.mozilla.org/pub/security/nss/releases/NSS_%(version_major)s_%(version_minor)s_RTM/src/'] -sources = [SOURCELOWER_TAR_GZ] -patches = [ - 'NSS-3.39_pkgconfig.patch', - '%(name)s-%(version)s_fix_kremlin_ppc64le.patch', -] -checksums = [ - '75348b3b3229362486c57a880db917da1f96ef4eb639dc9cc2ff17d72268459c', # nss-3.51.tar.gz - '5c4b55842e5afd1e8e67b90635f6474510b89242963c4ac2622d3e3da9062774', # NSS-3.39_pkgconfig.patch - '2fc7bd556737d34a62c06f86899863b7071b71943ffb4facfb413a087b8bee2e', # NSS-3.51_fix_kremlin_ppc64le.patch -] - -builddependencies = [('binutils', '2.36.1')] -dependencies = [ - ('NSPR', '4.25'), - ('zlib', '1.2.11'), -] - -# building in parallel fails -parallel = 1 - -# fix for not being able to find header files -buildopts = 'BUILD_OPT=1 USE_64=1 CPATH="$EBROOTNSPR/include/nspr:$CPATH" ' -# fix c standard causing missing functions -buildopts += 'OS_REL_CFLAGS="-D_XOPEN_SOURCE " && ' -# also install pkgconfig file (see patch) -buildopts += "cd config && make PREFIX=%(installdir)s BUILD_OPT=1 USE_64=1 && cd -" - -files_to_copy = ['../dist/Linux*.OBJ/*', (['../dist/public/*'], 'include')] - -sanity_check_paths = { - 'files': ['lib/libnss.a'], - 'dirs': ['bin', 'include/dbm', 'include/nss'], -} - -modextrapaths = {'CPATH': 'include/nss'} - -moduleclass = 'lib' diff --git a/Golden_Repo/n/NSS/NSS-3.51-GCCcore-9.3.0.eb b/Golden_Repo/n/NSS/NSS-3.51-GCCcore-9.3.0.eb deleted file mode 100644 index 26856442045a22791ce1ab4ced192585bbd53b25..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NSS/NSS-3.51-GCCcore-9.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'MakeCp' - -name = 'NSS' -version = '3.51' - -homepage = 'https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSS' -description = """Network Security Services (NSS) is a set of libraries designed to support cross-platform development - 
of security-enabled client and server applications.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://ftp.mozilla.org/pub/security/nss/releases/NSS_%(version_major)s_%(version_minor)s_RTM/src/'] -sources = [SOURCELOWER_TAR_GZ] -patches = [ - 'NSS-3.39_pkgconfig.patch', - '%(name)s-%(version)s_fix_kremlin_ppc64le.patch', -] -checksums = [ - '75348b3b3229362486c57a880db917da1f96ef4eb639dc9cc2ff17d72268459c', # nss-3.51.tar.gz - '5c4b55842e5afd1e8e67b90635f6474510b89242963c4ac2622d3e3da9062774', # NSS-3.39_pkgconfig.patch - '2fc7bd556737d34a62c06f86899863b7071b71943ffb4facfb413a087b8bee2e', # NSS-3.51_fix_kremlin_ppc64le.patch -] - -builddependencies = [('binutils', '2.34')] -dependencies = [ - ('NSPR', '4.25'), - ('zlib', '1.2.11'), -] - -# building in parallel fails -parallel = 1 - -# fix for not being able to find header files -buildopts = 'BUILD_OPT=1 USE_64=1 CPATH="$EBROOTNSPR/include/nspr:$CPATH" ' -# fix c standard causing missing functions -buildopts += 'OS_REL_CFLAGS="-D_XOPEN_SOURCE " && ' -# also install pkgconfig file (see patch) -buildopts += "cd config && make PREFIX=%(installdir)s BUILD_OPT=1 USE_64=1 && cd -" - -files_to_copy = ['../dist/Linux*.OBJ/*', (['../dist/public/*'], 'include')] - -sanity_check_paths = { - 'files': ['lib/libnss.a'], - 'dirs': ['bin', 'include/dbm', 'include/nss'], -} - -modextrapaths = {'CPATH': 'include/nss'} - -moduleclass = 'lib' diff --git a/Golden_Repo/n/NSS/NSS-3.51_fix_kremlin_ppc64le.patch b/Golden_Repo/n/NSS/NSS-3.51_fix_kremlin_ppc64le.patch deleted file mode 100644 index 56e2edda9b18ec6b8bebb684adac00c2a831fb04..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NSS/NSS-3.51_fix_kremlin_ppc64le.patch +++ /dev/null @@ -1,28 +0,0 @@ -Patch based on https://github.com/FStarLang/kremlin/pull/167 -Prepared for EasyBuild by Simon Branford of the BEAR Software team at the University of Birmingham -diff -aur nss-3.51.orig/nss/lib/freebl/verified/kremlin/include/kremlin/internal/types.h nss-3.51/nss/lib/freebl/verified/kremlin/include/kremlin/internal/types.h ---- nss-3.51.orig/nss/lib/freebl/verified/kremlin/include/kremlin/internal/types.h 2020-03-23 20:23:06.943356000 +0000 -+++ nss-3.51/nss/lib/freebl/verified/kremlin/include/kremlin/internal/types.h 2020-03-23 20:24:27.270377000 +0000 -@@ -56,7 +56,8 @@ - #include <emmintrin.h> - typedef __m128i FStar_UInt128_uint128; - #elif !defined(KRML_VERIFIED_UINT128) && !defined(_MSC_VER) && \ -- (defined(__x86_64__) || defined(__x86_64) || defined(__aarch64__)) -+ (defined(__x86_64__) || defined(__x86_64) || defined(__aarch64__) || \ -+ (defined(__powerpc64__) && defined(__LITTLE_ENDIAN__))) - typedef unsigned __int128 FStar_UInt128_uint128; - #else - typedef struct FStar_UInt128_uint128_s { -diff -aur nss-3.51.orig/nss/lib/freebl/verified/kremlin/kremlib/dist/minimal/fstar_uint128_gcc64.h nss-3.51/nss/lib/freebl/verified/kremlin/kremlib/dist/minimal/fstar_uint128_gcc64.h ---- nss-3.51.orig/nss/lib/freebl/verified/kremlin/kremlib/dist/minimal/fstar_uint128_gcc64.h 2020-03-23 20:23:06.947505000 +0000 -+++ nss-3.51/nss/lib/freebl/verified/kremlin/kremlib/dist/minimal/fstar_uint128_gcc64.h 2020-03-23 20:25:20.007003000 +0000 -@@ -25,7 +25,8 @@ - #include "LowStar_Endianness.h" - - #if !defined(KRML_VERIFIED_UINT128) && !defined(_MSC_VER) && \ -- (defined(__x86_64__) || defined(__x86_64) || defined(__aarch64__)) -+ (defined(__x86_64__) || defined(__x86_64) || defined(__aarch64__) || \ -+ (defined(__powerpc64__) && 
defined(__LITTLE_ENDIAN__))) - - /* GCC + using native unsigned __int128 support */ - diff --git a/Golden_Repo/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb b/Golden_Repo/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb deleted file mode 100644 index 31b0fc972fdf0a710b9e9ef9a70020a681e6d60d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb +++ /dev/null @@ -1,85 +0,0 @@ -name = 'NVHPC' -version = '20.11' -local_gccver = '9.3.0' -versionsuffix = '-GCC-%s' % local_gccver - -homepage = 'https://developer.nvidia.com/hpc-sdk/' -description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)""" -site_contacts = 'a.herten@fz-juelich.de' - -toolchain = SYSTEM - -# By downloading, you accept the HPC SDK Software License Agreement (https://docs.nvidia.com/hpc-sdk/eula/index.html) -# accept_eula = True -source_urls = ['https://developer.download.nvidia.com/hpc-sdk/%(version)s/'] -local_tarball_tmpl = 'nvhpc_2020_%%(version_major)s%%(version_minor)s_Linux_%s_cuda_multi.tar.gz' -sources = [local_tarball_tmpl % '%(arch)s'] -checksums = [ - { - local_tarball_tmpl % 'x86_64': - 'c80fc26e5ba586696f7030f03054c1aaca0752a891c7923faf47eb23b66857ec', - local_tarball_tmpl % 'ppc64le': - '99e5a5437e82f3914e0fe81feb761a5b599a3fe8b31f3c2cac8ae47e8cdc7b0f' - } -] - -local_gccver = '9.3.0' -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), - ('CUDA', '11.0', '', SYSTEM), - # This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails - ('numactl', '2.0.13', '', SYSTEM) -] - -module_add_cuda = False -cuda_compute_capabilities = "7.0" - -# specify default CUDA version that should be used by NVHPC -# should match one of the CUDA versions that are included with this NVHPC version -# (see install_components/Linux_x86_64/20.7/cuda/) -# for NVHPC 20.7, those are: 11.0, 10.2, 10.1; -# this version can be tweaked from the EasyBuild command line with -# --try-amend=default_cuda_version="10.2" (for example) -default_cuda_version = '11.0' - -# NVHPC EasyBlock supports some features, which can be set via CLI or this easyconfig. -# The following list gives examples for the easyconfig -# -# NVHPC needs CUDA to work. Two options are available: 1) Use NVHPC-bundled CUDA, 2) use system CUDA -# 1) Bundled CUDA -# If no easybuild dependency to CUDA is present, the bundled CUDA is taken. A version needs to be specified with -# default_cuda_version = "11.0" -# in this easyconfig file; alternatively, it can be specified through the command line during installation with -# --try-amend=default_cuda_version="10.2" -# 2) CUDA provided via EasyBuild -# Use CUDAcore as a dependency, for example -# dependencies = [('CUDAcore', '11.0.2')] -# The parameter default_cuda_version still can be set as above. -# If not set, it will be deduced from the CUDA module (via $EBVERSIONCUDA) -# -# Define a NVHPC-default Compute Capability -# cuda_compute_capabilities = "8.0" -# Can also be specified on the EasyBuild command line via --cuda-compute-capabilities=8.0 -# Only single values supported, not lists of values! 
-# -# Options to add/remove things to/from environment module (defaults shown) -# module_byo_compilers = False # Remove compilers from PATH (Bring-your-own compilers) -# module_nvhpc_own_mpi = False # Add NVHPC's own pre-compiled OpenMPI -# module_add_math_libs = False # Add NVHPC's math libraries (which should be there from CUDA anyway) -# module_add_profilers = False # Add NVHPC's NVIDIA Profilers -# module_add_nccl = False # Add NVHPC's NCCL library -# module_add_nvshmem = False # Add NVHPC's NVSHMEM library -# module_add_cuda = False # Add NVHPC's bundled CUDA - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = ''' -family("compiler") -add_property("arch","gpu") -''' - -# Always do a recursive unload on compilers -recursive_module_unload = True diff --git a/Golden_Repo/n/NVHPC/NVHPC-20.7-GCC-9.3.0.eb b/Golden_Repo/n/NVHPC/NVHPC-20.7-GCC-9.3.0.eb deleted file mode 100644 index 9bfd7b836fc68f05d6ef056f1f30d78d50bedfc2..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NVHPC/NVHPC-20.7-GCC-9.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'NVHPC' -version = '20.7' -local_gccver = '9.3.0' -versionsuffix = '-GCC-%s' % local_gccver - -homepage = 'https://developer.nvidia.com/hpc-sdk/' -description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)""" - -site_contacts = 'a.herten@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['nvhpc_2020_%(version_major)s%(version_minor)s_Linux_x86_64_cuda_multi.tar.gz'] -checksums = ['a5c5c8726d2210f2310a852c6d6e03c9ef8c75e3643e9c94e24909f5e9c2ea7a'] - -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), - ('CUDA', '11.0', '', SYSTEM), - # This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails - ('numactl', '2.0.13', '', SYSTEM) -] - -module_add_cuda = False -default_cuda_version = "11.0" -cuda_compute_capabilities = "7.0" - - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = ''' -family("compiler") -add_property("arch","gpu") -''' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' diff --git a/Golden_Repo/n/NVHPC/NVHPC-20.9--nvccrc-fail.patch b/Golden_Repo/n/NVHPC/NVHPC-20.9--nvccrc-fail.patch deleted file mode 100644 index 3b379faa639fb413c188742ca509eb95ba5f7fc5..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NVHPC/NVHPC-20.9--nvccrc-fail.patch +++ /dev/null @@ -1,18 +0,0 @@ ---- nvhpc_2020_209_Linux_x86_64_cuda_11.0/install_components/Linux_x86_64/20.9/compilers/bin/rcfiles/nvccrc.bak 2020-10-28 11:40:18.642068000 +0100 -+++ nvhpc_2020_209_Linux_x86_64_cuda_11.0/install_components/Linux_x86_64/20.9/compilers/bin/rcfiles/nvccrc 2020-10-28 11:41:04.525783000 +0100 -@@ -51,10 +51,11 @@ - variable TREECUDADIR is default($COMPBASE/$CUDAMAJOR/$CUDAVER/bin); - variable CUDADIR is default($if($or($SETVER,$equal($NVCOMPILER_CUDA_HOME,)),$TREECUDADIR,$NVCOMPILER_CUDA_HOME/bin)); - --restart($if($isdir($CUDADIR), -- $if($expr($CUDAXXYY>=$NVREQCUDAVERSIONXXYY), -- $CUDADIR, -- $error(The nvc++ host compiler is only supported with CUDA 11.0 or newer)), -+restart($if($isdir($CUDADIR),$CUDADIR, -+# PATCH AH 2020-10-28: Otherwise, nvcc will not work...; Copied this from NVHPC 20.7 -+# 
$if($expr($CUDAXXYY>=$NVREQCUDAVERSIONXXYY), -+# $CUDADIR, -+# $error(The nvc++ host compiler is only supported with CUDA 11.0 or newer)), - $error( - $if($SETVER, - $if($equal($CUDAVER,), diff --git a/Golden_Repo/n/NVHPC/NVHPC-20.9-GCC-9.3.0.eb b/Golden_Repo/n/NVHPC/NVHPC-20.9-GCC-9.3.0.eb deleted file mode 100644 index 32773bf054eb8c0517405500a5e6fcb980ee5abf..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NVHPC/NVHPC-20.9-GCC-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -name = 'NVHPC' -version = '20.9' -local_gccver = '9.3.0' -versionsuffix = '-GCC-%s' % local_gccver - -homepage = 'https://developer.nvidia.com/hpc-sdk/' -description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)""" - -site_contacts = 'a.herten@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['nvhpc_2020_%(version_major)s%(version_minor)s_Linux_x86_64_cuda_11.0.tar.gz'] -checksums = ['8fa07d762e1b48155f3d531a16b8fffeb6f28b9d8a0033a1f2ba47fdb16ffd58'] - -patches = ['NVHPC-20.9--nvccrc-fail.patch'] - -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), - ('CUDA', '11.0', '', SYSTEM), - # This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails - ('numactl', '2.0.13', '', SYSTEM) -] - -module_add_cuda = False -default_cuda_version = "11.0" -cuda_compute_capabilities = "7.0" - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = ''' -family("compiler") -add_property("arch","gpu") -''' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' diff --git a/Golden_Repo/n/NVHPC/NVHPC-21.1-GCC-9.3.0.eb b/Golden_Repo/n/NVHPC/NVHPC-21.1-GCC-9.3.0.eb deleted file mode 100644 index c0ed2817d168fa8b0716ed1af3497ba44b1565db..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NVHPC/NVHPC-21.1-GCC-9.3.0.eb +++ /dev/null @@ -1,94 +0,0 @@ -# For using $SYSTEMNAME to determine compute capability. 
The local prefix is to appease the checker -import os as local_os - -name = 'NVHPC' -version = '21.1' -local_gccver = '9.3.0' -versionsuffix = '-GCC-%s' % local_gccver - -homepage = 'https://developer.nvidia.com/hpc-sdk/' -description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)""" -site_contacts = 'a.herten@fz-juelich.de' - -toolchain = SYSTEM - -# By downloading, you accept the HPC SDK Software License Agreement (https://docs.nvidia.com/hpc-sdk/eula/index.html) -# accept_eula = True -source_urls = ['https://developer.download.nvidia.com/hpc-sdk/%(version)s/'] -local_tarball_tmpl = 'nvhpc_2021_%%(version_major)s%%(version_minor)s_Linux_%s_cuda_multi.tar.gz' -sources = [local_tarball_tmpl % '%(arch)s'] -checksums = [ - { - local_tarball_tmpl % 'x86_64': - 'd529daf46404724ac3f005be4239f2c30e53f5220bb9453f367dccc3a74d6b41', - local_tarball_tmpl % 'ppc64le': - '99e5a5437e82f3914e0fe81feb761a5b599a3fe8b31f3c2cac8ae47e8cdc7b0f' - } -] - -local_gccver = '9.3.0' -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), - ('CUDA', '11.0', '', SYSTEM), - # This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails - ('numactl', '2.0.13', '', SYSTEM) -] - -module_add_cuda = False -cuda_compute_capabilities = { - 'juwels': '7.0', - 'juwelsbooster': '8.0', - 'jurecadc': '8.0', - 'jusuf': '7.0', - 'hdfml': '7.0' -}[local_os.environ['SYSTEMNAME']] - -# specify default CUDA version that should be used by NVHPC -# should match one of the CUDA versions that are included with this NVHPC version -# (see install_components/Linux_x86_64/20.7/cuda/) -# for NVHPC 20.7, those are: 11.0, 10.2, 10.1; -# this version can be tweaked from the EasyBuild command line with -# --try-amend=default_cuda_version="10.2" (for example) -default_cuda_version = '11.0' - -# NVHPC EasyBlock supports some features, which can be set via CLI or this easyconfig. -# The following list gives examples for the easyconfig -# -# NVHPC needs CUDA to work. Two options are available: 1) Use NVHPC-bundled CUDA, 2) use system CUDA -# 1) Bundled CUDA -# If no easybuild dependency to CUDA is present, the bundled CUDA is taken. A version needs to be specified with -# default_cuda_version = "11.0" -# in this easyconfig file; alternatively, it can be specified through the command line during installation with -# --try-amend=default_cuda_version="10.2" -# 2) CUDA provided via EasyBuild -# Use CUDAcore as a dependency, for example -# dependencies = [('CUDAcore', '11.0.2')] -# The parameter default_cuda_version still can be set as above. -# If not set, it will be deduced from the CUDA module (via $EBVERSIONCUDA) -# -# Define a NVHPC-default Compute Capability -# cuda_compute_capabilities = "8.0" -# Can also be specified on the EasyBuild command line via --cuda-compute-capabilities=8.0 -# Only single values supported, not lists of values! 
-# -# Options to add/remove things to/from environment module (defaults shown) -# module_byo_compilers = False # Remove compilers from PATH (Bring-your-own compilers) -# module_nvhpc_own_mpi = False # Add NVHPC's own pre-compiled OpenMPI -# module_add_math_libs = False # Add NVHPC's math libraries (which should be there from CUDA anyway) -# module_add_profilers = False # Add NVHPC's NVIDIA Profilers -# module_add_nccl = False # Add NVHPC's NCCL library -# module_add_nvshmem = False # Add NVHPC's NVSHMEM library -# module_add_cuda = False # Add NVHPC's bundled CUDA - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = ''' -family("compiler") -add_property("arch","gpu") -''' - -# Always do a recursive unload on compilers -recursive_module_unload = True diff --git a/Golden_Repo/n/NVHPC/NVHPC-21.5-GCC-10.3.0.eb b/Golden_Repo/n/NVHPC/NVHPC-21.5-GCC-10.3.0.eb deleted file mode 100644 index b81417b84031a303f50e3427c873aa2f6f1bbce6..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NVHPC/NVHPC-21.5-GCC-10.3.0.eb +++ /dev/null @@ -1,82 +0,0 @@ -# For using $SYSTEMNAME to determine compute capability. The local prefix is to appease the checker -import os as local_os - -name = 'NVHPC' -version = '21.5' -local_gccver = '10.3.0' -versionsuffix = f'-GCC-{local_gccver}' - -homepage = 'https://developer.nvidia.com/hpc-sdk/' -description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)""" -site_contacts = 'a.herten@fz-juelich.de' - -toolchain = SYSTEM - -# By downloading, you accept the HPC SDK Software License Agreement (https://docs.nvidia.com/hpc-sdk/eula/index.html) -# accept_eula = True -source_urls = ['https://developer.download.nvidia.com/hpc-sdk/%(version)s/'] -local_tarball_tmpl = 'nvhpc_2021_%%(version_major)s%%(version_minor)s_Linux_%s_cuda_multi.tar.gz' -sources = [local_tarball_tmpl % '%(arch)s'] -checksums = [ - { - local_tarball_tmpl % 'x86_64': - '21989e52c58a6914743631c8200de1fec7e10b3449c6c1833f3032ee74b85f8e', - } -] - -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.36.1', '', ('GCCcore', local_gccver)), - ('CUDA', '11.3', '', SYSTEM), - # This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails - ('numactl', '2.0.14', '', SYSTEM) -] - -module_add_cuda = False -cuda_compute_capabilities = { - 'juwels': '7.0', - 'juwelsbooster': '8.0', - 'jurecadc': '8.0', - 'jusuf': '7.0', - 'hdfml': '7.0' -}[local_os.environ['SYSTEMNAME']] - -# specify default CUDA version that should be used by NVHPC -# should match one of the CUDA versions that are included with this NVHPC version -# (see install_components/Linux_x86_64/20.7/cuda/) -# for NVHPC 20.7, those are: 11.0, 10.2, 10.1; -# this version can be tweaked from the EasyBuild command line with -# --try-amend=default_cuda_version="10.2" (for example) -default_cuda_version = '11.3' - -# NVHPC EasyBlock supports some features, which can be set via CLI or this easyconfig. -# The following list gives examples for the easyconfig -# -# NVHPC needs CUDA to work. Two options are available: 1) Use NVHPC-bundled CUDA, 2) use system CUDA -# 1) Bundled CUDA -# If no easybuild dependency to CUDA is present, the bundled CUDA is taken. 
A version needs to be specified with -# default_cuda_version = "11.0" -# in this easyconfig file; alternatively, it can be specified through the command line during installation with -# --try-amend=default_cuda_version="10.2" -# 2) CUDA provided via EasyBuild -# Use CUDAcore as a dependency, for example -# dependencies = [('CUDAcore', '11.0.2')] -# The parameter default_cuda_version still can be set as above. -# If not set, it will be deduced from the CUDA module (via $EBVERSIONCUDA) -# -# Define a NVHPC-default Compute Capability -# cuda_compute_capabilities = "8.0" -# Can also be specified on the EasyBuild command line via --cuda-compute-capabilities=8.0 -# Only single values supported, not lists of values! -# -# Options to add/remove things to/from environment module (defaults shown) -# module_byo_compilers = False # Remove compilers from PATH (Bring-your-own compilers) -# module_nvhpc_own_mpi = False # Add NVHPC's own pre-compiled OpenMPI -# module_add_math_libs = False # Add NVHPC's math libraries (which should be there from CUDA anyway) -# module_add_profilers = False # Add NVHPC's NVIDIA Profilers -# module_add_nccl = False # Add NVHPC's NCCL library -# module_add_nvshmem = False # Add NVHPC's NVSHMEM library -# module_add_cuda = False # Add NVHPC's bundled CUDA - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' diff --git a/Golden_Repo/n/NWChem/NWChem-7.0.2-intel-para-2020-Python-3.8.5.eb b/Golden_Repo/n/NWChem/NWChem-7.0.2-intel-para-2020-Python-3.8.5.eb deleted file mode 100644 index 33ca2da98c9160ae779f8f275e1862db5c061ddf..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NWChem/NWChem-7.0.2-intel-para-2020-Python-3.8.5.eb +++ /dev/null @@ -1,37 +0,0 @@ -name = 'NWChem' -version = '7.0.2' -versionsuffix = '-Python-%(pyver)s' -local_verdate = '2020-10-12' -local_revision = 'b9985dfa' - - -homepage = 'https://nwchemgit.github.io' -description = """NWChem aims to provide its users with computational chemistry tools that are scalable both in - their ability to treat large scientific computational chemistry problems efficiently, and in their use of available - parallel computing resources from high-performance parallel supercomputers to conventional workstation clusters. - NWChem software can handle: biomolecules, nanostructures, and solid-state; from quantum to classical, and all - combinations; Gaussian basis functions or plane-waves; scaling from one to thousands of processors; properties - and relativity.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'i8': True} -source_urls = ['https://github.com/nwchemgit/nwchem/releases/download/v%(version)s-release/'] -sources = ['nwchem-%%(version)s-release.revision-%s-src.%s.tar.bz2' % (local_revision, local_verdate)] -patches = [ - 'NWChem_fix-date.patch', -] - -dependencies = [('Python', '3.8.5')] - -# This easyconfig is using the default for armci_network (OPENIB) and -# thus needs infiniband libraries. 
-osdependencies = [ - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - ('libibumad-dev', 'libibumad-devel'), -] - -modules = 'all python' - -moduleclass = 'chem' diff --git a/Golden_Repo/n/NWChem/NWChem_fix-date.patch b/Golden_Repo/n/NWChem/NWChem_fix-date.patch deleted file mode 100644 index 5d846acc3c8b52d54d5aabbd1a8a794163b11646..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/NWChem/NWChem_fix-date.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- nwchem-6.1.1-src/src/GNUmakefile.orig 2012-12-10 17:23:43.236474825 +0100 -+++ nwchem-6.1.1-src/src/GNUmakefile 2012-12-10 17:23:55.916623423 +0100 -@@ -7,7 +7,7 @@ - - SUBDIRS = $(NWSUBDIRS) - -- LIB_DEFINES = -DCOMPILATION_DATE="'`date +%a_%b_%d_%H:%M:%S_%Y`'" \ -+ LIB_DEFINES = -DCOMPILATION_DATE="'`date`'" \ - -DCOMPILATION_DIR="'$(TOPDIR)'" \ - -DNWCHEM_BRANCH="'$(CODE_BRANCH)'" - ifeq ($(TARGET),FUJITSU_VPP) diff --git a/Golden_Repo/n/Ninja-Python/Ninja-Python-1.10.0-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/n/Ninja-Python/Ninja-Python-1.10.0-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 6e96e6e34b83ee40153773c8c8537413d52dd0c9..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Ninja-Python/Ninja-Python-1.10.0-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'Ninja-Python' -version = '1.10.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://ninja-build.org/' -description = "Ninja is a small build system with a focus on speed." - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://pypi.python.org/packages/source/n/ninja'] -sources = [ - {'download_filename': 'ninja-%(version)s.tar.gz', 'filename': SOURCE_TAR_GZ}] - - -dependencies = [ - ('Python', '3.8.5'), - ('scikit', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), -] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - - -options = {'modulename': 'ninja'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/ninja'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Ninja/Ninja-1.10.0-GCCcore-10.3.0.eb b/Golden_Repo/n/Ninja/Ninja-1.10.0-GCCcore-10.3.0.eb deleted file mode 100644 index 9a04969a4b4b405963209fddce3abcd0ed402680..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Ninja/Ninja-1.10.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'CmdCp' - -name = 'Ninja' -version = '1.10.0' - -homepage = 'https://ninja-build.org/' -description = "Ninja is a small build system with a focus on speed." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/ninja-build/ninja/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Python', '3.8.5'), -] - -cmds_map = [('.*', "./configure.py --bootstrap")] - -files_to_copy = [(['ninja'], 'bin')] - -sanity_check_paths = { - 'files': ['bin/ninja'], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Ninja/Ninja-1.10.0-GCCcore-9.3.0.eb b/Golden_Repo/n/Ninja/Ninja-1.10.0-GCCcore-9.3.0.eb deleted file mode 100644 index de6033b72cd1cfe3f45de8c05be0817519c6a44a..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Ninja/Ninja-1.10.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'CmdCp' - -name = 'Ninja' -version = '1.10.0' - -homepage = 'https://ninja-build.org/' -description = "Ninja is a small build system with a focus on speed." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/ninja-build/ninja/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('Python', '3.8.5'), -] - -cmds_map = [('.*', "./configure.py --bootstrap")] - -files_to_copy = [(['ninja'], 'bin')] - -sanity_check_paths = { - 'files': ['bin/ninja'], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.1.2-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.1.2-GCCcore-9.3.0.eb deleted file mode 100644 index 6ab692fb7218aac5e90e1f88699e4256dd4b1833..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.1.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2020.1.2' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.4-28820667.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['6d3613817bc963927f72cc209ca650e3'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('nvidia-driver', 'default', '', SYSTEM), - # ('OpenGL', '2019a') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround due to wrong permissions once the files are extracted from the .run file -postinstallcmds = ['find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;'] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb deleted file mode 100644 index aa0623aacad01744fcc4a27fb8e97ec3c6daa668..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2020.2.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.18-28964561.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['604769a55a72adce8f1513fcacb36d7cf5b5e3cc99b65d6a20e4d5e987344cb0'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround due to wrong permissions once the files are extracted from the .run file -postinstallcmds = ['find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;'] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.3.0-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.3.0-GCCcore-9.3.0.eb deleted file mode 100644 index c6029d714fce57ffe2b3fdaa402cf00a1a29a11f..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2020.3.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2020.3.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.18-29307467.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['af838473286e180904ec972892faf13ef4546b0e0fb539b87c06382daadfe2c1'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround due to wrong permissions once the files are extracted from the .run file -postinstallcmds = ['find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;'] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.1.0-GCCcore-10.3.0.eb b/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.1.0-GCCcore-10.3.0.eb deleted file mode 100644 index a926f85ac905416f54588756d8cb054128fd1086..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.1.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2021.1.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.18-29693910.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['6c31066468eaad57eb94a6527012eeefbcf70c1c0a7ce71ca85f546efe667bb8'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround due to wrong permissions once the files are extracted from the .run file -postinstallcmds = [ - 'find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;'] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.1.0-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.1.0-GCCcore-9.3.0.eb deleted file mode 100644 index 7417665e4107eb258b85d47bcaa644311e1e2a1f..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.1.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2021.1.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.18-29693910.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['6c31066468eaad57eb94a6527012eeefbcf70c1c0a7ce71ca85f546efe667bb8'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround due to wrong permissions once the files are extracted from the .run file -postinstallcmds = [ - 'find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;'] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.2.0-GCCcore-10.3.0.eb b/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.2.0-GCCcore-10.3.0.eb deleted file mode 100644 index f451b971f0dc63f6fa104df00784d3d083cea1a9..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Compute/Nsight-Compute-2021.2.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2021.2.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.15-30066266.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['fd058676115b8003ec2370a2bf199fbd6c6346811c4b4e3e1707f6ad044032b1'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround due to wrong permissions once the files are extracted from the .run file -postinstallcmds = [ - 'find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;'] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.3.1-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.3.1-GCCcore-9.3.0.eb deleted file mode 100644 index a9cc5bb736540f18482570ba2ae1110a71ee0c38..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.3.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Systems' -version = '2020.3.1' -homepage = 'https://developer.nvidia.com/nsight-systems' -description = 'NVIDIA Nsight Systems is a system-wide performance analysis tool' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -# GCCcore toolchain is not strictly necessary, but used to bring it to same level as Nsight Compute -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-systems' -sources = [{ - 'filename': 'NVIDIA_Nsight_Systems_Linux_%(version)s.72.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['5fc69c9834f3dfcf1e8053e69d3f740f'] - -dependencies = [ - ('nvidia-driver', 'default', '', SYSTEM), - # ('OpenGL', '2019a') -] - -extract_sources = True -unpack_options = '--accept --nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -sanity_check_paths = { - 'files': ['bin/nsys'], - 'dirs': ['target-linux-x64', 'host-linux-x64'] -} - -modextravars = { - 'NSIGHT_DOC': '%(installdir)s/documentation/nsys-exporter' -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.4.1-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.4.1-GCCcore-9.3.0.eb deleted file mode 100644 index 5a28c9b8d194527bcea41c9633034569fdefbbf3..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.4.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Systems' -version = '2020.4.1' -homepage = 'https://developer.nvidia.com/nsight-systems' -description = 'NVIDIA Nsight Systems is a system-wide performance analysis tool' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -# GCCcore toolchain is not strictly necessary, but used to bring it to same level as Nsight Compute -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-systems' -sources = [{ - 'filename': 'NsightSystems-linux-public-%(version)s.126-d2d452d.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['298236c46c36aa9c8784ee2d69e687e9901e8c7ca77dc0a4bcf0857a76995bf8'] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - - -extract_sources = True -unpack_options = '--accept --nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -sanity_check_paths = { - 'files': ['bin/nsys'], - 'dirs': ['target-linux-x64', 'host-linux-x64'] -} - -modextravars = { - 'NSIGHT_DOC': '%(installdir)s/documentation/nsys-exporter' -} - -modluafooter = """ 
-add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.5.1-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.5.1-GCCcore-9.3.0.eb deleted file mode 100644 index 677c9574e150e21ac3000489f742236f1c5412f3..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2020.5.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Systems' -version = '2020.5.1' -homepage = 'https://developer.nvidia.com/nsight-systems' -description = 'NVIDIA Nsight Systems is a system-wide performance analysis tool' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -# GCCcore toolchain is not strictly necessary, but used to bring it to same level as Nsight Compute -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-systems' -sources = [{ - 'filename': 'NsightSystems-linux-public-%(version)s.83-db7e763.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['d9990dc2c237418b171bd0871949aeff4fc7d2febfa7a3ea0d7eed2b010903f0'] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - - -extract_sources = True -unpack_options = '--accept --nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -sanity_check_paths = { - 'files': ['bin/nsys'], - 'dirs': ['target-linux-x64', 'host-linux-x64'] -} - -modextravars = { - 'NSIGHT_DOC': '%(installdir)s/documentation/nsys-exporter' -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.1.1-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.1.1-GCCcore-9.3.0.eb deleted file mode 100644 index 095049209b8d9622838f4cd82e4df14251bf5adc..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.1.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Systems' -version = '2021.1.1' -homepage = 'https://developer.nvidia.com/nsight-systems' -description = 'NVIDIA Nsight Systems is a system-wide performance analysis tool' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -# GCCcore toolchain is not strictly necessary, but used to bring it to same level as Nsight Compute -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-systems' -sources = [{ - 'filename': 'NsightSystems-linux-public-%(version)s.66-6c5c5cb.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['1549e4dd1c9526a285836940b1e2b79864ac540a58557ddeb76db947ffda2f87'] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - - -extract_sources = True -unpack_options = '--accept --nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -sanity_check_paths = { - 'files': ['bin/nsys'], - 'dirs': ['target-linux-x64', 'host-linux-x64'] -} - -modextravars = { - 'NSIGHT_DOC': '%(installdir)s/documentation/nsys-exporter' -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.2.1-GCCcore-10.3.0.eb 
deleted file mode 100644
index 801b400cacdc2426e11faa8f8a88686ca50a3758..0000000000000000000000000000000000000000
--- a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.2.1-GCCcore-10.3.0.eb
+++ /dev/null
@@ -1,46 +0,0 @@
-# jg (CSCS)
-# AH (JSC)
-easyblock = 'Binary'
-
-name = 'Nsight-Systems'
-version = '2021.2.1'
-homepage = 'https://developer.nvidia.com/nsight-systems'
-description = 'NVIDIA Nsight Systems is a system-wide performance analysis tool'
-
-site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>'
-
-# GCCcore toolchain is not strictly necessary, but used to bring it to same level as Nsight Compute
-toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
-
-# Download source binary package manually, requires Nvidia Developer Account
-# source_urls = 'https://developer.nvidia.com/nsight-systems'
-sources = [{
-    'filename': 'NsightSystems-linux-public-%(version)s.58-642947b.run',
-    'extract_cmd': '/bin/sh %s'
-}]
-checksums = ['d99ddc87e8933937edb8744e55c5e4054f2f702880dd799f0ba2905e768aae4c']
-
-dependencies = [
-    ('X11', '20200222')
-]
-
-
-extract_sources = True
-unpack_options = '--accept --nochown --noexec --nox11 --target %(builddir)s'
-
-install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/'
-
-sanity_check_paths = {
-    'files': ['bin/nsys'],
-    'dirs': ['target-linux-x64', 'host-linux-x64']
-}
-
-modextravars = {
-    'NSIGHT_DOC': '%(installdir)s/documentation/nsys-exporter'
-}
-
-modluafooter = """
-add_property("arch","gpu")
-"""
-
-moduleclass = 'tools'
diff --git a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.2.1-GCCcore-9.3.0.eb b/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.2.1-GCCcore-9.3.0.eb
deleted file mode 100644
index 7801e75ed266a9bb1e4b968ccb121c9bcce9b4b4..0000000000000000000000000000000000000000
--- a/Golden_Repo/n/Nsight-Systems/Nsight-Systems-2021.2.1-GCCcore-9.3.0.eb
+++ /dev/null
@@ -1,46 +0,0 @@
-# jg (CSCS)
-# AH (JSC)
-easyblock = 'Binary'
-
-name = 'Nsight-Systems'
-version = '2021.2.1'
-homepage = 'https://developer.nvidia.com/nsight-systems'
-description = 'NVIDIA Nsight Systems is a system-wide performance analysis tool'
-
-site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>'
-
-# GCCcore toolchain is not strictly necessary, but used to bring it to same level as Nsight Compute
-toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
-
-# Download source binary package manually, requires Nvidia Developer Account
-# source_urls = 'https://developer.nvidia.com/nsight-systems'
-sources = [{
-    'filename': 'NsightSystems-linux-public-%(version)s.58-642947b.run',
-    'extract_cmd': '/bin/sh %s'
-}]
-checksums = ['d99ddc87e8933937edb8744e55c5e4054f2f702880dd799f0ba2905e768aae4c']
-
-dependencies = [
-    ('X11', '20200222')
-]
-
-
-extract_sources = True
-unpack_options = '--accept --nochown --noexec --nox11 --target %(builddir)s'
-
-install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/'
-
-sanity_check_paths = {
-    'files': ['bin/nsys'],
-    'dirs': ['target-linux-x64', 'host-linux-x64']
-}
-
-modextravars = {
-    'NSIGHT_DOC': '%(installdir)s/documentation/nsys-exporter'
-}
-
-modluafooter = """
-add_property("arch","gpu")
-"""
-
-moduleclass = 'tools'
diff --git a/Golden_Repo/n/nano/nano-5.5-GCCcore-10.3.0.eb b/Golden_Repo/n/nano/nano-5.5-GCCcore-10.3.0.eb
deleted file mode 100644
index 3827efbebabeef617a12f0e62b1df760b349b8db..0000000000000000000000000000000000000000
--- a/Golden_Repo/n/nano/nano-5.5-GCCcore-10.3.0.eb
+++ /dev/null
@@ -1,27 +0,0 @@
-easyblock = 'ConfigureMake'
-
-name = 'nano'
-version = '5.5'
-
-homepage = 'https://www.nano-editor.org/'
-description = """GNU nano is a small and friendly text editor. Besides basic text editing, nano
-offers features like undo/redo, syntax coloring, interactive search-and-replace,
-auto-indentation, line numbers, word completion, file locking, backup files, and
-internationalization support."""
-
-
-site_contacts = 'a.strube@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
-
-source_urls = [GNU_SOURCE]
-sources = [SOURCE_TAR_GZ]
-
-builddependencies = [('binutils', '2.36.1')]
-
-sanity_check_paths = {
-    'files': ['bin/nano'],
-    'dirs': ['bin', 'share'],
-}
-
-moduleclass = 'tools'
diff --git a/Golden_Repo/n/nano/nano-5.5-GCCcore-9.3.0.eb b/Golden_Repo/n/nano/nano-5.5-GCCcore-9.3.0.eb
deleted file mode 100644
index 4d0344e37406e862cb7db74b883065368d263ad0..0000000000000000000000000000000000000000
--- a/Golden_Repo/n/nano/nano-5.5-GCCcore-9.3.0.eb
+++ /dev/null
@@ -1,27 +0,0 @@
-easyblock = 'ConfigureMake'
-
-name = 'nano'
-version = '5.5'
-
-homepage = 'https://www.nano-editor.org/'
-description = """GNU nano is a small and friendly text editor. Besides basic text editing, nano
-offers features like undo/redo, syntax coloring, interactive search-and-replace,
-auto-indentation, line numbers, word completion, file locking, backup files, and
-internationalization support."""
-
-
-site_contacts = 'a.strube@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
-
-source_urls = [GNU_SOURCE]
-sources = [SOURCE_TAR_GZ]
-
-builddependencies = [('binutils', '2.34')]
-
-sanity_check_paths = {
-    'files': ['bin/nano'],
-    'dirs': ['bin', 'share'],
-}
-
-moduleclass = 'tools'
diff --git a/Golden_Repo/n/ncurses/ncurses-6.2-GCCcore-10.3.0.eb b/Golden_Repo/n/ncurses/ncurses-6.2-GCCcore-10.3.0.eb
deleted file mode 100644
index 5117d001f8836a00676ac6299cf6c54b9ac890f2..0000000000000000000000000000000000000000
--- a/Golden_Repo/n/ncurses/ncurses-6.2-GCCcore-10.3.0.eb
+++ /dev/null
@@ -1,53 +0,0 @@
-easyblock = 'ConfigureMake'
-
-name = 'ncurses'
-version = '6.2'
-
-homepage = 'https://www.gnu.org/software/ncurses/'
-description = """
-The Ncurses (new curses) library is a free software emulation of curses in
-System V Release 4.0, and more. It uses Terminfo format, supports pads and
-color and multiple highlights and forms characters and function-key mapping,
-and has all the other SYSV-curses enhancements over BSD Curses.
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['30306e0c76e0f9f1f0de987cf1c82a5c21e1ce6568b9227f7da5b71cbea86c9d'] - -builddependencies = [('binutils', '2.36.1')] - -local_common_configopts = "--with-shared --enable-overwrite --enable-symlinks " -configopts = [ - # default build - local_common_configopts, - # the UTF-8 enabled version (ncursesw) - local_common_configopts + \ - "--enable-ext-colors --enable-widec --includedir=%(installdir)s/include/ncursesw/", -] - -# we need to add -fPIC, but should also include -O* option to avoid compiling with -O0 (default for GCC) -buildopts = 'CFLAGS="-O2 -fPIC"' - -# Symlink libtinfo to libncurses (since it can handle the API) so it doesn't get picked up from the OS -postinstallcmds = [ - "ln -s %(installdir)s/lib/libncurses.so %(installdir)s/lib/libtinfo.so", - "ln -s %(installdir)s/lib/libncurses.a %(installdir)s/lib/libtinfo.a" -] - -local_libs = ["form", "menu", "ncurses", "panel"] -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ["captoinfo", "clear", "infocmp", "infotocap", "ncurses%(version_major)s-config", - "reset", "tabs", "tic", "toe", "tput", "tset"]] + - ['lib/lib%s%s.a' % (x, y) for x in local_libs for y in ['', '_g', 'w', 'w_g']] + - ['lib/lib%s%s.%s' % (x, y, SHLIB_EXT) for x in local_libs for y in ['', 'w']] + - ['lib/libncurses++%s.a' % x for x in ['', 'w']], - 'dirs': ['include', 'include/ncursesw'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/n/ncurses/ncurses-6.2-GCCcore-9.3.0.eb b/Golden_Repo/n/ncurses/ncurses-6.2-GCCcore-9.3.0.eb deleted file mode 100644 index 4ad8426f51d197c470e35c9bb724095e77c3332c..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/ncurses/ncurses-6.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ncurses' -version = '6.2' - -homepage = 'https://www.gnu.org/software/ncurses/' -description = """The Ncurses (new curses) library is a free software emulation of curses in System V Release 4.0, - and more. 
It uses Terminfo format, supports pads and color and multiple highlights and forms characters and - function-key mapping, and has all the other SYSV-curses enhancements over BSD Curses.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [('binutils', '2.34')] - -local_common_configopts = "--with-shared --enable-overwrite --enable-symlinks " -configopts = [ - # default build - local_common_configopts, - # the UTF-8 enabled version (ncursesw) - local_common_configopts + "--enable-ext-colors --enable-widec --includedir=%(installdir)s/include/ncursesw/", -] - -# we need to add -fPIC, but should also include -O* option to avoid compiling with -O0 (default for GCC) -buildopts = 'CFLAGS="-O2 -fPIC"' - -# Symlink libtinfo to libncurses (since it can handle the API) so it doesn't get picked up from the OS -postinstallcmds = [ - "ln -s %(installdir)s/lib/libncurses.so %(installdir)s/lib/libtinfo.so", - "ln -s %(installdir)s/lib/libncurses.a %(installdir)s/lib/libtinfo.a" -] - -local_libs = ["form", "menu", "ncurses", "panel"] -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ["captoinfo", "clear", "infocmp", "infotocap", "ncurses%(version_major)s-config", - "reset", "tabs", "tic", "toe", "tput", "tset"]] + - ['lib/lib%s%s.a' % (x, y) for x in local_libs for y in ['', '_g', 'w', 'w_g']] + - ['lib/lib%s%s.%s' % (x, y, SHLIB_EXT) for x in local_libs for y in ['', 'w']] + - ['lib/libncurses++%s.a' % x for x in ['', 'w']], - 'dirs': ['include', 'include/ncursesw'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/n/ncurses/ncurses-6.2.eb b/Golden_Repo/n/ncurses/ncurses-6.2.eb deleted file mode 100644 index b7c787d09310aa284cb8550e92697e83f488cd2e..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/ncurses/ncurses-6.2.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ncurses' -version = '6.2' - -homepage = 'https://www.gnu.org/software/ncurses/' -description = """The Ncurses (new curses) library is a free software emulation of curses in System V Release 4.0, - and more. 
It uses Terminfo format, supports pads and color and multiple highlights and forms characters and - function-key mapping, and has all the other SYSV-curses enhancements over BSD Curses.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -local_common_configopts = "--with-shared --enable-overwrite --without-ada --enable-symlinks " -configopts = [ - # default build - local_common_configopts, - # the UTF-8 enabled version (ncursesw) - local_common_configopts + "--enable-ext-colors --enable-widec --includedir=%(installdir)s/include/ncursesw/", -] - -# need to take care of $CFLAGS ourselves with dummy toolchain -# we need to add -fPIC, but should also include -O* option to avoid compiling with -O0 (default for GCC) -buildopts = 'CFLAGS="-O2 -fPIC"' - -# Symlink libtinfo to libncurses (since it can handle the API) so it doesn't get picked up from the OS -postinstallcmds = [ - "ln -s %(installdir)s/lib/libncurses.so %(installdir)s/lib/libtinfo.so", - "ln -s %(installdir)s/lib/libncurses.a %(installdir)s/lib/libtinfo.a" -] - -local_libs = ["form", "menu", "ncurses", "panel"] -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ["captoinfo", "clear", "infocmp", "infotocap", "ncurses%(version_major)s-config", - "reset", "tabs", "tic", "toe", "tput", "tset"]] + - ['lib/lib%s%s.a' % (x, y) for x in local_libs for y in ['', '_g', 'w', 'w_g']] + - ['lib/lib%s%s.%s' % (x, y, SHLIB_EXT) for x in local_libs for y in ['', 'w']] + - ['lib/libncurses++%s.a' % x for x in ['', 'w']], - 'dirs': ['include', 'include/ncursesw'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/n/ncview/ncview-2.1.8-GCC-9.3.0.eb b/Golden_Repo/n/ncview/ncview-2.1.8-GCC-9.3.0.eb deleted file mode 100644 index 86e6d32d08edd2b1909e7edec37d8a545ff1150d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/ncview/ncview-2.1.8-GCC-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ncview' -version = "2.1.8" - -homepage = 'http://meteora.ucsd.edu/~pierce/ncview_home_page.html' -description = """Ncview is a visual browser for netCDF format files.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['ftp://cirrus.ucsd.edu/pub/ncview/'] -sources = [SOURCE_TAR_GZ] -checksums = ['e8badc507b9b774801288d1c2d59eb79ab31b004df4858d0674ed0d87dfc91be'] - -preconfigopts = 'CC=$(which $CC) ' -configopts = '--with-udunits2_incdir=$EBROOTUDUNITS/include --with-udunits2_libdir=$EBROOTUDUNITS/lib ' -configopts += '--with-png_libdir=$EBROOTLIBPNG/lib --with-png_incdir=$EBROOTLIBPNG/include' - -builddependencies = [ - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('netCDF', '4.7.4', '-serial'), - ('UDUNITS', '2.2.26'), - ('X11', '20200222'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ['bin/ncview'], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-GCCcore-10.3.0-serial.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-GCCcore-10.3.0-serial.eb deleted file mode 100644 index d4e1f579acecaf929a7f0aa57b80cb4a9b381006..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-GCCcore-10.3.0-serial.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' -versionsuffix = '-serial' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software 
libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -builddependencies = [('binutils', '2.36.1')] -dependencies = [('netCDF', '4.7.4', '-serial')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-GCCcore-9.3.0-serial.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-GCCcore-9.3.0-serial.eb deleted file mode 100644 index 939d09985a61594196d2e5a01144da5d75d2e16f..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-GCCcore-9.3.0-serial.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' -versionsuffix = '-serial' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -builddependencies = [('binutils', '2.34')] -dependencies = [('netCDF', '4.7.4', '-serial')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gompi-2020.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gompi-2020.eb deleted file mode 100644 index 4579678528d194e85d13e299bff06a71660fae51..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gompi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gompi-2021.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gompi-2021.eb deleted file mode 100644 index cf332fdfaf801f3321cbda1c404293f10743f1a4..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gompi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gpsmpi-2020.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gpsmpi-2020.eb deleted file mode 100644 index c4b757c09637fea967a9fcbabc1d736128b391ce..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gpsmpi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gpsmpi-2021.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gpsmpi-2021.eb deleted file mode 100644 index 4ab5b013190f51db223799068fd1a1dfcd90abb1..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-gpsmpi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iimpi-2020.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iimpi-2020.eb deleted file mode 100644 index 876d07bb8686cebaa1f8fdde3a626b5a00613d3a..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iimpi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iimpi-2021.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iimpi-2021.eb deleted file mode 100644 index 5e9aaaba68a18abf599fe317a0f63ec0e3063d77..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iimpi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iompi-2020.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iompi-2020.eb deleted file mode 100644 index 004248a3895cbf017105a68c04f320f2833d1d6d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iompi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iompi-2021.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iompi-2021.eb deleted file mode 100644 index 42f21c3d9f9cb14c32ff37d29da0d96b9aa4b4f1..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-iompi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2020-mt.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2020-mt.eb deleted file mode 100644 index 4f62c13c0aa41434335bcea0a16950cadb2c689e..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2020.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2020.eb deleted file mode 100644 index 22bf3f75d12c9c083acb7959ad7222e3644e469c..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2021.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2021.eb deleted file mode 100644 index a3df2e3423b2c7a251dfa18ba5f582beba0fe72d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-ipsmpi-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2020.1.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2020.1.eb deleted file mode 100644 index 0a8927a4edf42e7daad736d403b348623f2f008a..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2020.1.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2020.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2020.eb deleted file mode 100644 index 9a7ec940edf50ee42a364906715ccd204400c48c..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2020.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2021.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2021.eb deleted file mode 100644 index 88908860332e61cb09546869d8ce420088fb3568..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-npsmpic-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-nvompic-2021.eb b/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-nvompic-2021.eb deleted file mode 100644 index 9357b9fc9317dd63a54bfa61c5cbf7735d29bfbb..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-C++4/netCDF-C++4-4.3.1-nvompic-2021.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'netCDF-C++4' -version = '4.3.1' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'nvompic', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc'] - -dependencies = [('netCDF', '4.7.4')] - -sanity_check_paths = { - 'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-GCCcore-10.3.0-serial.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-GCCcore-10.3.0-serial.eb deleted file mode 100644 index a7b5773a00532a49bca229d4548567e1c6ba224f..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-GCCcore-10.3.0-serial.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' -versionsuffix = '-serial' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -builddependencies = [('binutils', '2.36.1')] -dependencies = [('netCDF', '4.7.4', '-serial')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-GCCcore-9.3.0-serial.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-GCCcore-9.3.0-serial.eb deleted file mode 100644 index e9a3b8bc201d881d3dec93555763922ae76c1a81..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-GCCcore-9.3.0-serial.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' -versionsuffix = '-serial' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -builddependencies = [('binutils', '2.34')] -dependencies = [('netCDF', '4.7.4', '-serial')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gompi-2020.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gompi-2020.eb deleted file mode 100644 index 02af1d5a4d1da21c3dfb4a5e35f867f36decda39..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gompi-2020.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gompi-2021.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gompi-2021.eb deleted file mode 100644 index 7c647354b32c994983f24c0545d7b336dab9733c..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gompi-2021.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gpsmpi-2020.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gpsmpi-2020.eb deleted file mode 100644 index 3fa91feb07cca3de93b39cda74753535e7e8ef1a..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gpsmpi-2020.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gpsmpi-2021.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gpsmpi-2021.eb deleted file mode 100644 index 00215b657c1d462633a7b4fc7a1f888a56f407ed..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-gpsmpi-2021.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iimpi-2020.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iimpi-2020.eb deleted file mode 100644 index 84c7b35e3ac2481730dc241dfa4fd98208b5df4e..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iimpi-2020.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iimpi-2021.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iimpi-2021.eb deleted file mode 100644 index 1afc701197a719093cd1a1d83477f3ae685181db..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iimpi-2021.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iompi-2020.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iompi-2020.eb deleted file mode 100644 index 4603bd8d204b006b82f51baf790229f171205c71..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iompi-2020.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iompi-2021.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iompi-2021.eb deleted file mode 100644 index 3222cb7d3eb2592cc0407de7d69ba27d4f823943..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-iompi-2021.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2020-mt.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2020-mt.eb deleted file mode 100644 index fefcf3167349fa45ceced87f6db55d69db231140..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2020.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2020.eb deleted file mode 100644 index 0aa63c6ff2ea593430e272ddaa8990f1e9346e25..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2020.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2021.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2021.eb deleted file mode 100644 index b826d6c186fac2ac4f8d218e8ac65107d22b1532..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-ipsmpi-2021.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2020.1.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2020.1.eb deleted file mode 100644 index 143a4142725c9a1f3bb3be3f75464b3f70069750..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2020.1.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2020.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2020.eb deleted file mode 100644 index 1d4b9294359a5ca464448bb81d52dce8d4aa7f2c..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2020.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2021.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2021.eb deleted file mode 100644 index e55c36f5deda600298778bb0da75a85731388650..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-npsmpic-2021.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-nvompic-2021.eb b/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-nvompic-2021.eb deleted file mode 100644 index 6433911f64df36786aba2bf89c18907cd77126d7..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF-Fortran/netCDF-Fortran-4.5.3-nvompic-2021.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'netCDF-Fortran' -version = '4.5.3' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'nvompic', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = [ - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/', - 'ftp://ftp.unidata.ucar.edu/pub/netcdf/old', -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['123a5c6184336891e62cf2936b9f2d1c54e8dee299cfd9d2c1a1eb05dd668a74'] - -dependencies = [('netCDF', '4.7.4')] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/gcc10.patch b/Golden_Repo/n/netCDF/gcc10.patch deleted file mode 100644 index e75cfba826a0037324bb7bf4d95bdcdcf2618609..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/gcc10.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- netcdf-c-4.7.4/ncdump/ocprint.c.orig 2021-06-21 17:48:09.683508311 +0200 -+++ netcdf-c-4.7.4/ncdump/ocprint.c 2021-06-21 17:46:47.919655878 +0200 -@@ -56,7 +56,7 @@ - /*Mnemonic*/ - #define TOPLEVEL 1 - --int ocdebug; -+extern int ocdebug; - - static OCerror ocstat; - static OClink glink; diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-GCCcore-10.3.0-serial.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-GCCcore-10.3.0-serial.eb deleted file mode 100644 index 0c6e35608c5ac55313bd6f4a38c32074c78dffff..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-GCCcore-10.3.0-serial.eb +++ /dev/null @@ -1,47 +0,0 @@ -name = 'netCDF' -version = '4.7.4' -versionsuffix = '-serial' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -patches = ['gcc10.patch'] -checksums = [ - '99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b', # v4.7.4.tar.gz - 'ff51e8b1b57896e7c7b7d34635522adf8b0658eeda90899b5060a176b0d0be9d', # gcc10.patch -] - -dependencies = [ - ('HDF5', '1.10.6', '-serial'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include -DBUILD_SHARED_LIBS=ON ", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-GCCcore-9.3.0-serial.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-GCCcore-9.3.0-serial.eb deleted file mode 100644 index a9b40c0b086cab356a69e54eb680747956ae79e7..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-GCCcore-9.3.0-serial.eb +++ /dev/null @@ -1,43 +0,0 @@ -name = 'netCDF' -version = '4.7.4' -versionsuffix = '-serial' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6', '-serial'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), -] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-gompi-2020.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-gompi-2020.eb deleted file mode 100644 index caf338f731becd8486766baa70e69ee453f2558f..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-gompi-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-gompi-2021.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-gompi-2021.eb deleted file mode 100644 index bf0d010172354734cc340160a5525d19874038c6..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-gompi-2021.eb +++ /dev/null @@ -1,48 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -patches = ['gcc10.patch'] -checksums = [ - '99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b', # v4.7.4.tar.gz - 'ff51e8b1b57896e7c7b7d34635522adf8b0658eeda90899b5060a176b0d0be9d', # gcc10.patch -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-gpsmpi-2020.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-gpsmpi-2020.eb deleted file mode 100644 index a7e86f9e4e2e4e319aa05e47dd60256580c64258..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-gpsmpi-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-gpsmpi-2021.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-gpsmpi-2021.eb deleted file mode 100644 index a210c6bb3711a324d992577d0dd53601e9b1c98e..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-gpsmpi-2021.eb +++ /dev/null @@ -1,48 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -patches = ['gcc10.patch'] -checksums = [ - '99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b', # v4.7.4.tar.gz - 'ff51e8b1b57896e7c7b7d34635522adf8b0658eeda90899b5060a176b0d0be9d', # gcc10.patch -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-iimpi-2020.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-iimpi-2020.eb deleted file mode 100644 index d7d31b7976c6d9d609700eb0ed2b0c447f3f751d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-iimpi-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-iimpi-2021.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-iimpi-2021.eb deleted file mode 100644 index 022c1f2dc379874343857c77bd41a6ab3709e3c2..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-iimpi-2021.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-iompi-2020.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-iompi-2020.eb deleted file mode 100644 index 000bde607dc100300c8c448c4a2043b85579ff37..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-iompi-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-iompi-2021.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-iompi-2021.eb deleted file mode 100644 index caf95281b376d4fbe3fcdd7ed4f1bba1b9782f82..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-iompi-2021.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2020-mt.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2020-mt.eb deleted file mode 100644 index 00e98ec126e2fd33ca9cb892bd195fc30ec71d08..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2020.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2020.eb deleted file mode 100644 index 564f73cf1b7352dab2771fb0541fdbdee152d20d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2021.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2021.eb deleted file mode 100644 index c796d315c87d842fe431f8bb546448b7043e809d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-ipsmpi-2021.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2020.1.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2020.1.eb deleted file mode 100644 index acfc28cf534695f7030ccb065f7f37d484f3f91c..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2020.1.eb +++ /dev/null @@ -1,46 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -patches = ['remove_double_shared_in_link_command.patch'] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2020.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2020.eb deleted file mode 100644 index d0fbeaeec637bb0eb5112851df9e9057c49b11cc..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2020.eb +++ /dev/null @@ -1,46 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18'), -] - -patches = ['remove_double_shared_in_link_command.patch'] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2021.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2021.eb deleted file mode 100644 index 6337d1b1de9af0f64b6b026867ad50a5d682e6f2..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-npsmpic-2021.eb +++ /dev/null @@ -1,46 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -patches = ['remove_double_shared_in_link_command.patch'] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/netCDF-4.7.4-nvompic-2021.eb b/Golden_Repo/n/netCDF/netCDF-4.7.4-nvompic-2021.eb deleted file mode 100644 index 8643413fc3db8c7b431e332e331e74d92e6312c1..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/netCDF-4.7.4-nvompic-2021.eb +++ /dev/null @@ -1,46 +0,0 @@ -name = 'netCDF' -version = '4.7.4' - -homepage = 'http://www.unidata.ucar.edu/software/netcdf/' -description = """NetCDF (network Common Data Form) is a set of software libraries - and machine-independent data formats that support the creation, access, and sharing of array-oriented - scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'nvompic', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf-c/archive/'] -sources = ['v%s.tar.gz' % (version)] -checksums = ['99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('cURL', '7.71.1'), - ('Szip', '2.1.1'), - ('parallel-netcdf', '1.12.1') -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18'), -] - -patches = ['remove_double_shared_in_link_command.patch'] - -# make sure both static and shared libs are built -configopts = [ - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=ON", - "-DCURL_LIBRARY=$EBROOTCURL/lib/libcurl.so -DCURL_INCLUDE_DIR=$EBROOTCURL/include " + - "-DENABLE_PNETCDF=ON -DBUILD_SHARED_LIBS=OFF", -] - -sanity_check_paths = { - 'files': ["include/netcdf_mem.h", "include/netcdf_par.h"], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/n/netCDF/remove_double_shared_in_link_command.patch b/Golden_Repo/n/netCDF/remove_double_shared_in_link_command.patch deleted file mode 100644 index 96cb9b2970bf68d1c283e2bd532ac99164225bc3..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netCDF/remove_double_shared_in_link_command.patch +++ /dev/null @@ -1,20 +0,0 @@ -diff -Nru netcdf-c-4.7.4.orig/liblib/CMakeLists.txt netcdf-c-4.7.4/liblib/CMakeLists.txt ---- netcdf-c-4.7.4.orig/liblib/CMakeLists.txt 2020-09-20 20:17:25.395953767 +0200 -+++ netcdf-c-4.7.4/liblib/CMakeLists.txt 2020-09-20 20:17:54.671071349 +0200 -@@ -111,11 +111,11 @@ - ) - ENDIF() - --IF(NOT MSVC) -- IF(BUILD_SHARED_LIBS) -- SET_TARGET_PROPERTIES(netcdf PROPERTIES LINK_FLAGS -shared) -- ENDIF() --ENDIF() -+#IF(NOT 
MSVC) -+# IF(BUILD_SHARED_LIBS) -+# SET_TARGET_PROPERTIES(netcdf PROPERTIES LINK_FLAGS -shared) -+# ENDIF() -+#ENDIF() - - IF(ENABLE_SHARED_LIBRARY_VERSION) - SET_TARGET_PROPERTIES(netcdf PROPERTIES diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-GCCcore-10.3.0-serial-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-GCCcore-10.3.0-serial-Python-3.8.5.eb deleted file mode 100644 index 55bf3f5ec88e942e7331f2a68c8dff2a6a5ffcfe..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-GCCcore-10.3.0-serial-Python-3.8.5.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-serial%s' % local_pysuffix - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -builddependencies = [ - ('binutils', '2.36.1'), -] -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', local_pysuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4', '-serial'), -] - -use_pip = True -sanity_pip_check = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-GCCcore-9.3.0-serial-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-GCCcore-9.3.0-serial-Python-3.8.5.eb deleted file mode 100644 index 71bf4f44f00b310cf0ff6e0ab6ad0508313b69ea..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-GCCcore-9.3.0-serial-Python-3.8.5.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-serial%s' % local_pysuffix - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -builddependencies = [ - ('binutils', '2.34'), -] -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('netCDF', '4.7.4', '-serial'), -] - -use_pip = True -sanity_pip_check = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gompi-2021-Python-3.8.5.eb 
b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gompi-2021-Python-3.8.5.eb deleted file mode 100644 index 08b67582fe35050b19fd946b7b16724e672b20f2..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index d15e753b887b178311a7542cb5cd28c328596e5d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 1e75d831f14251a82c1cdfbcab6fa5bfb47b6558..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ 
-easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-iimpi-2021-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-iimpi-2021-Python-3.8.5.eb deleted file mode 100644 index c6ee1b772f8cb7f18fa18bcede4d7ad937187bf7..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-iimpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-iompi-2021-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-iompi-2021-Python-3.8.5.eb deleted file mode 100644 index f95f2efca3659626cac5e762071ab88a5eb7966c..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-iompi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} 
-toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2020-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index eb556e313557705771705c612092036b7c422fd9..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2020-mt-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2020-mt-Python-3.8.5.eb deleted file mode 100644 index b7debd8155dada2b671127446263e5aafaf7462f..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2020-mt-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip 
= True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2021-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 62b63870d545a6637af18b88a9c57965af535dde..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-ipsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-npsmpic-2021-Python-3.8.5.eb b/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-npsmpic-2021-Python-3.8.5.eb deleted file mode 100644 index 4a5516b2311bcb357fb133333066b375c1ad2eb4..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/netcdf4-python/netcdf4-python-1.5.4-npsmpic-2021-Python-3.8.5.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'netcdf4-python' -version = '1.5.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://unidata.github.io/netcdf4-python/' -description = """Python/numpy interface to netCDF.""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/Unidata/netcdf4-python/archive/'] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4'), - ('mpi4py', '3.0.3', versionsuffix), -] - -use_pip = True -sanity_pip_check = True -runtest = False # mpirun problems -skipsteps = ['sanitycheck'] # mpirun problems -preinstallopts = 'CFLAGS=-noswitcherror' # fix compiler problem during extension installation - - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('cftime', '1.2.1', { - 
'checksums': ['ab5d5076f7d3e699758a244ada7c66da96bae36e22b9e351ce0ececc36f0a57f'], - }), - (name, version, { - 'source_tmpl': 'netCDF4-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/n/netCDF4'], - 'checksums': ['941de6f3623b6474ecb4d043be5990690f7af4cf0d593b31be912627fe5aad03'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -moduleclass = 'data' diff --git a/Golden_Repo/n/nettle/nettle-3.6-GCCcore-10.3.0.eb b/Golden_Repo/n/nettle/nettle-3.6-GCCcore-10.3.0.eb deleted file mode 100644 index bb7b01f58e1f3cf45f59510e14764425312f3fa2..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nettle/nettle-3.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'nettle' -version = '3.6' - -homepage = 'http://www.lysator.liu.se/~nisse/nettle/' -description = """Nettle is a cryptographic library that is designed to fit easily -in more or less any context: In crypto toolkits for object-oriented -languages (C++, Python, Pike, ...), in applications like LSH or GNUPG, -or even in kernel space. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('M4', '1.4.18') -] - -dependencies = [ - ('GMP', '6.2.0'), -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['nettle-hash', 'nettle-lfib-stream', 'pkcs1-conv', 'sexp-conv']] + - ['lib64/libhogweed.a', 'lib64/libhogweed.%s' % SHLIB_EXT, - 'lib64/libnettle.a', 'lib64/libnettle.%s' % SHLIB_EXT], - 'dirs': ['include/nettle'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/n/nettle/nettle-3.6-GCCcore-9.3.0.eb b/Golden_Repo/n/nettle/nettle-3.6-GCCcore-9.3.0.eb deleted file mode 100644 index fdb9453add329ff9c8be92c6db90f8a7d7a541fa..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nettle/nettle-3.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'nettle' -version = '3.6' - -homepage = 'http://www.lysator.liu.se/~nisse/nettle/' -description = """Nettle is a cryptographic library that is designed to fit easily -in more or less any context: In crypto toolkits for object-oriented -languages (C++, Python, Pike, ...), in applications like LSH or GNUPG, -or even in kernel space. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), - ('M4', '1.4.18') -] - -dependencies = [ - ('GMP', '6.2.0'), -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in ['nettle-hash', 'nettle-lfib-stream', 'pkcs1-conv', 'sexp-conv']] + - ['lib64/libhogweed.a', 'lib64/libhogweed.%s' % SHLIB_EXT, - 'lib64/libnettle.a', 'lib64/libnettle.%s' % SHLIB_EXT], - 'dirs': ['include/nettle'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/n/nlohmann-json/nlohmann-json-3.9.1-GCCcore-10.3.0.eb b/Golden_Repo/n/nlohmann-json/nlohmann-json-3.9.1-GCCcore-10.3.0.eb deleted file mode 100644 index 7fa2d2a44ff895111f8fa9f7d2938b76d64118b4..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nlohmann-json/nlohmann-json-3.9.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'nlohmann-json' -version = '3.9.1' - -homepage = "https://github.com/nlohmann/json" -description = """JSON for modern C++ by Niels Lohmann -""" - -site_contacts = 'h.zilken@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/nlohmann/json/archive'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('binutils', '2.36.1') -] - -sanity_check_paths = { - 'files': [], - 'dirs': [('include', 'lib64')] -} diff --git a/Golden_Repo/n/nlohmann-json/nlohmann-json-3.9.1-GCCcore-9.3.0.eb b/Golden_Repo/n/nlohmann-json/nlohmann-json-3.9.1-GCCcore-9.3.0.eb deleted file mode 100644 index b740fe842b438eef851c04d8f36c9d96a033ed76..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nlohmann-json/nlohmann-json-3.9.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'nlohmann-json' -version = '3.9.1' - -homepage = "https://github.com/nlohmann/json" -description = """JSON for modern C++ by Niels Lohmann -""" - -site_contacts = 'h.zilken@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/nlohmann/json/archive'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34') -] - -sanity_check_paths = { - 'files': [], - 'dirs': [('include', 'lib64')] -} diff --git a/Golden_Repo/n/nodejs/nodejs-12.19.0-GCCcore-10.3.0.eb b/Golden_Repo/n/nodejs/nodejs-12.19.0-GCCcore-10.3.0.eb deleted file mode 100644 index b7481c5aace236e497717919bbc289a3a454bc31..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nodejs/nodejs-12.19.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'nodejs' -version = '12.19.0' -local_libversion = '72' - -homepage = 'https://nodejs.org' -description = """Node.js is a platform built on Chrome's JavaScript runtime - for easily building fast, scalable network applications. 
Node.js uses an - event-driven, non-blocking I/O model that makes it lightweight and efficient, - perfect for data-intensive real-time applications that run across distributed devices.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://%(name)s.org/dist/v%(version)s/'] -sources = ['node-v%(version)s.tar.gz'] -checksums = ['1279e49be60d92cf4c1a48c9d92397db4e9284a100bc47689ce7924686bbddd1'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Python', '3.8.5'), -] - -configopts = [ - '--with-intl=none', # Fully disable ICU to avoid issues with the embedded icu-small library - '--shared --with-intl=none', # Build libnode.so in a second run -] - -# Link libv8 libs to libnode -postinstallcmds = [ - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libnode.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libv8.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libv8_libbase.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libv8_libplatform.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), -] - -sanity_check_paths = { - 'files': ['bin/node', 'bin/npm', 'lib/libnode.%s.%s' % (SHLIB_EXT, local_libversion)], - 'dirs': ['lib/node_modules', 'include/node'] -} - -modextrapaths = {'CPATH': 'include/node'} - -moduleclass = 'lang' diff --git a/Golden_Repo/n/nodejs/nodejs-12.19.0-GCCcore-9.3.0.eb b/Golden_Repo/n/nodejs/nodejs-12.19.0-GCCcore-9.3.0.eb deleted file mode 100644 index fc8283d69c6eef1e64e261eac88f20bdce88f324..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nodejs/nodejs-12.19.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'nodejs' -version = '12.19.0' -local_libversion = '72' - -homepage = 'https://nodejs.org' -description = """Node.js is a platform built on Chrome's JavaScript runtime - for easily building fast, scalable network applications. 
Node.js uses an - event-driven, non-blocking I/O model that makes it lightweight and efficient, - perfect for data-intensive real-time applications that run across distributed devices.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://%(name)s.org/dist/v%(version)s/'] -sources = ['node-v%(version)s.tar.gz'] -checksums = ['1279e49be60d92cf4c1a48c9d92397db4e9284a100bc47689ce7924686bbddd1'] - -builddependencies = [ - ('binutils', '2.34'), - ('Python', '3.8.5'), -] - -configopts = [ - '--with-intl=none', # Fully disable ICU to avoid issues with the embedded icu-small library - '--shared --with-intl=none', # Build libnode.so in a second run -] - -# Link libv8 libs to libnode -postinstallcmds = [ - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libnode.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libv8.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libv8_libbase.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), - "cd %%(installdir)s/lib; ln -s libnode.%s.%s libv8_libplatform.%s" % (SHLIB_EXT, local_libversion, SHLIB_EXT), -] - -sanity_check_paths = { - 'files': ['bin/node', 'bin/npm', 'lib/libnode.%s.%s' % (SHLIB_EXT, local_libversion)], - 'dirs': ['lib/node_modules', 'include/node'] -} - -modextrapaths = {'CPATH': 'include/node'} - -moduleclass = 'lang' diff --git a/Golden_Repo/n/npsmpic/npsmpic-2020.1.eb b/Golden_Repo/n/npsmpic/npsmpic-2020.1.eb deleted file mode 100644 index 514004d1501807d10abaeac4b61d5b14261af9ec..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/npsmpic/npsmpic-2020.1.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'Toolchain' - -name = 'npsmpic' -version = '2020.1' - -homepage = '(none)' -description = 'NVHPC based compiler toolchain, including Parastation MPICH2 for MPI support.' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('NVHPC', '21.1-GCC-9.3.0') - -dependencies = [ - local_compiler, - ('CUDA', '11.0', '', SYSTEM), - ('psmpi', '5.4.8-1', '', local_compiler), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/n/npsmpic/npsmpic-2020.eb b/Golden_Repo/n/npsmpic/npsmpic-2020.eb deleted file mode 100644 index 7bcfeea34d26cde8aa42ce7f0de0124028c6631a..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/npsmpic/npsmpic-2020.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'Toolchain' - -name = 'npsmpic' -version = '2020' - -homepage = '(none)' -description = 'NVHPC based compiler toolchain, including Parastation MPICH2 for MPI support.' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('NVHPC', '20.9-GCC-9.3.0') - -dependencies = [ - local_compiler, - ('CUDA', '11.0', '', SYSTEM), - ('psmpi', '5.4.7-1', '', local_compiler), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/n/npsmpic/npsmpic-2021.eb b/Golden_Repo/n/npsmpic/npsmpic-2021.eb deleted file mode 100644 index 5b94e35a99c3208adff365d7eea8b75b0e6a6d1a..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/npsmpic/npsmpic-2021.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'Toolchain' - -name = 'npsmpic' -version = '2021' - -homepage = '(none)' -description = 'NVHPC based compiler toolchain, including Parastation MPICH2 for MPI support.' 
- -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('NVHPC', '21.5-GCC-10.3.0') - -dependencies = [ - local_compiler, - ('CUDA', '11.3', '', SYSTEM), - ('psmpi', '5.4.9-1', '', local_compiler), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/n/nsync/nsync-1.24.0-GCCcore-10.3.0.eb b/Golden_Repo/n/nsync/nsync-1.24.0-GCCcore-10.3.0.eb deleted file mode 100644 index 44574dc1d0ff752f7c71b48f4b95579205435b06..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nsync/nsync-1.24.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'CMakeNinja' - -name = 'nsync' -version = '1.24.0' - -homepage = 'https://github.com/google/nsync' -description = """nsync is a C library that exports various synchronization primitives, such as mutexes""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/google/nsync/archive/v%(version)s/'] -sources = [SOURCE_TAR_GZ] -checksums = ['47a6eb2a295be5121a1904a6a775722338a20dc02ee3eec4169ed2c3f203617a'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('Ninja', '1.10.0'), -] - -sanity_check_paths = { - 'files': ['include/nsync.h', 'lib/libnsync.a', 'lib/libnsync_cpp.a'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/n/nsync/nsync-1.24.0-GCCcore-9.3.0.eb b/Golden_Repo/n/nsync/nsync-1.24.0-GCCcore-9.3.0.eb deleted file mode 100644 index c1390e0922ae72ad1614a062b84c8eaf09a96b7d..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nsync/nsync-1.24.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'CMakeNinja' - -name = 'nsync' -version = '1.24.0' - -homepage = 'https://github.com/google/nsync' -description = """nsync is a C library that exports various synchronization primitives, such as mutexes""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/google/nsync/archive/v%(version)s/'] -sources = [SOURCE_TAR_GZ] -checksums = ['47a6eb2a295be5121a1904a6a775722338a20dc02ee3eec4169ed2c3f203617a'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Ninja', '1.10.0'), -] - -sanity_check_paths = { - 'files': ['include/nsync.h', 'lib/libnsync.a', 'lib/libnsync_cpp.a'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/n/numactl/numactl-2.0.13.eb b/Golden_Repo/n/numactl/numactl-2.0.13.eb deleted file mode 100644 index bfe0390759ccb8007d8759bf64e3d5d670038153..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/numactl/numactl-2.0.13.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'numactl' -version = '2.0.13' - -homepage = 'https://github.com/numactl/numactl' - -description = """ - The numactl program allows you to run your application program on specific - cpu's and memory nodes. It does this by supplying a NUMA memory policy to - the operating system before running your program. The libnuma library provides - convenient ways for you to add NUMA memory policies into your own program. 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/numactl/numactl/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['97ee012b2f294903530424b4ff7f28bcaad6a356897ce8777383f87e5c2e325d'] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), -] - -preconfigopts = './autogen.sh && ' - -sanity_check_paths = { - 'files': ['bin/numactl', 'bin/numastat', 'lib/libnuma.%s' % SHLIB_EXT, 'lib/libnuma.a'], - 'dirs': ['share/man', 'include'] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/n/numactl/numactl-2.0.14.eb b/Golden_Repo/n/numactl/numactl-2.0.14.eb deleted file mode 100644 index 7c666b2fe7d60f0e730b78c50c756178df3e42e0..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/numactl/numactl-2.0.14.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'numactl' -version = '2.0.14' - -homepage = 'https://github.com/numactl/numactl' - -description = """ - The numactl program allows you to run your application program on specific - cpu's and memory nodes. It does this by supplying a NUMA memory policy to - the operating system before running your program. The libnuma library provides - convenient ways for you to add NUMA memory policies into your own program. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/numactl/numactl/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['1ee27abd07ff6ba140aaf9bc6379b37825e54496e01d6f7343330cf1a4487035'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), -] - -preconfigopts = './autogen.sh && ' - -sanity_check_paths = { - 'files': ['bin/numactl', 'bin/numastat', 'lib/libnuma.%s' % SHLIB_EXT, 'lib/libnuma.a'], - 'dirs': ['share/man', 'include'] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/n/numba/llvmlite-0.31.0_fix-ffi-Makefile.patch b/Golden_Repo/n/numba/llvmlite-0.31.0_fix-ffi-Makefile.patch deleted file mode 100644 index ba8332525af65dc14b6d4561f9c7e46b7f63e930..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/numba/llvmlite-0.31.0_fix-ffi-Makefile.patch +++ /dev/null @@ -1,13 +0,0 @@ -Make sure easybuild libs are used for llvmlite -Author: Ariel Lozano (ULB) ---- a/ffi/Makefile.linux 2019-10-10 21:15:38.000000000 +0200 -+++ b/ffi/Makefile.linux 2019-11-13 12:22:07.061890000 +0100 -@@ -7,7 +7,7 @@ - - CXXFLAGS := $(CPPFLAGS) $(CXXFLAGS) $(LLVM_CXXFLAGS) $(CXX_FLTO_FLAGS) - LDFLAGS := $(LDFLAGS) $(LLVM_LDFLAGS) $(LD_FLTO_FLAGS) --LIBS = $(LLVM_LIBS) -+LIBS := $(LIBS) $(LLVM_LIBS) - INCLUDE = core.h - SRC = assembly.cpp bitcode.cpp core.cpp initfini.cpp module.cpp value.cpp \ - executionengine.cpp transforms.cpp passmanagers.cpp targets.cpp dylib.cpp \ diff --git a/Golden_Repo/n/numba/llvmlite-fpic.patch b/Golden_Repo/n/numba/llvmlite-fpic.patch deleted file mode 100644 index 3d667cb506b6241d69b84e9a9660fb80dd998b97..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/numba/llvmlite-fpic.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ruN llvmlite-0.20.0.orig/ffi/Makefile.linux llvmlite-0.20.0/ffi/Makefile.linux ---- llvmlite-0.20.0.orig/ffi/Makefile.linux 2017-03-28 20:16:54.000000000 +0200 -+++ llvmlite-0.20.0/ffi/Makefile.linux 2017-10-20 12:00:17.815421000 +0200 -@@ -19,7 +19,7 @@ - $(OUTPUT): $(SRC) $(INCLUDE) - # static-libstdc++ avoids runtime dependencies on a - # particular libstdc++ version. 
-- $(CXX) $(CXX_STATIC_LINK) -shared $(CXXFLAGS) $(SRC) -o $(OUTPUT) $(LDFLAGS) $(LIBS) -+ $(CXX) $(CXX_STATIC_LINK) -fPIC -shared $(CXXFLAGS) $(SRC) -o $(OUTPUT) $(LDFLAGS) $(LIBS) - - clean: - rm -rf test $(OUTPUT) diff --git a/Golden_Repo/n/numba/numba-0.51.1-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/n/numba/numba-0.51.1-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index b655fdf3da426b9c64f42c5d9377e05f7afb5458..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/numba/numba-0.51.1-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'numba' -version = '0.51.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://numba.pydata.org/' -description = """Numba is an Open Source NumPy-aware optimizing compiler for -Python sponsored by Continuum Analytics, Inc. It uses the remarkable LLVM -compiler infrastructure to compile Python syntax to machine code.""" - -usage = ''' -In case you intend to use CUDA functionality of Numba, you need to set the following variables -AFTER loading the CUDA module: - -NUMBAPRO_LIBDEVICE="$CUDA_HOME/nvvm/libdevice/" -NUMBAPRO_NVVM="$CUDA_HOME/nvvm/lib64/libnvvm.so" -''' - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), - ('LLVM', '10.0.1'), -] - -use_pip = True -sanity_pip_check = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('icc-rt', '2020.0.133', { - 'modulename': False, - 'source_tmpl': 'icc_rt-%(version)s-py2.py3-none-manylinux1_x86_64.whl', - 'unpack_sources': False, - }), - ('intel-openmp', '2020.0.133', { - 'modulename': False, - 'source_tmpl': 'intel_openmp-%(version)s-py2.py3-none-manylinux1_x86_64.whl', - 'unpack_sources': False, - }), - ('llvmlite', '0.34.0', { - 'patches': ['llvmlite-0.31.0_fix-ffi-Makefile.patch', 'llvmlite-fpic.patch'], - 'preinstallopts': "export LLVM_CONFIG=${EBROOTLLVM}/bin/llvm-config && ", - 'source_urls': ['https://pypi.python.org/packages/source/l/llvmlite/'], - }), - (name, version, { - 'source_urls': ['https://pypi.python.org/packages/source/n/numba/'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -sanity_check_paths = { - 'files': ['bin/numba', 'bin/pycc'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -sanity_check_commands = ["numba --help"] - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'lang' diff --git a/Golden_Repo/n/numba/numba-0.51.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/n/numba/numba-0.51.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 1201d6c59a825a5e91767a381d501943192324ab..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/numba/numba-0.51.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'numba' -version = '0.51.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://numba.pydata.org/' -description = """Numba is an Open Source NumPy-aware optimizing compiler for -Python sponsored by Continuum Analytics, Inc. 
It uses the remarkable LLVM -compiler infrastructure to compile Python syntax to machine code.""" - -usage = ''' -In case you intend to use CUDA functionality of Numba, you need to set the following variables -AFTER loading the CUDA module: - -NUMBAPRO_LIBDEVICE="$CUDA_HOME/nvvm/libdevice/" -NUMBAPRO_NVVM="$CUDA_HOME/nvvm/lib64/libnvvm.so" -''' - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), - ('LLVM', '10.0.1'), -] - -use_pip = True -sanity_pip_check = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('icc-rt', '2020.0.133', { - 'modulename': False, - 'source_tmpl': 'icc_rt-%(version)s-py2.py3-none-manylinux1_x86_64.whl', - 'unpack_sources': False, - }), - ('intel-openmp', '2020.0.133', { - 'modulename': False, - 'source_tmpl': 'intel_openmp-%(version)s-py2.py3-none-manylinux1_x86_64.whl', - 'unpack_sources': False, - }), - ('llvmlite', '0.34.0', { - 'patches': ['llvmlite-0.31.0_fix-ffi-Makefile.patch', 'llvmlite-fpic.patch'], - 'preinstallopts': "export LLVM_CONFIG=${EBROOTLLVM}/bin/llvm-config && ", - 'source_urls': ['https://pypi.python.org/packages/source/l/llvmlite/'], - }), - (name, version, { - 'source_urls': ['https://pypi.python.org/packages/source/n/numba/'], - }), -] - -fix_python_shebang_for = ['bin/*'] - -sanity_check_paths = { - 'files': ['bin/numba', 'bin/pycc'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -sanity_check_commands = ["numba --help"] - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'lang' diff --git a/Golden_Repo/n/nvidia-driver/nvidia-driver-default.eb b/Golden_Repo/n/nvidia-driver/nvidia-driver-default.eb deleted file mode 100644 index a309773c297b72c7f6623467664bce54b5a6cd26..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nvidia-driver/nvidia-driver-default.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'nvidia-driver' -version = 'default' -realversion = '465.31' - -homepage = 'https://developer.nvidia.com/cuda-toolkit' -description = """This is a set of libraries normally installed by the NVIDIA driver installer.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['http://us.download.nvidia.com/tesla/%s/' % realversion] -sources = ['NVIDIA-Linux-x86_64-%s.run' % realversion] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'system' diff --git a/Golden_Repo/n/nvompic/nvompic-2021.eb b/Golden_Repo/n/nvompic/nvompic-2021.eb deleted file mode 100644 index 666d0ea218ab4b32c9ff23a8268a9d1ed606ade0..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nvompic/nvompic-2021.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'Toolchain' - -name = 'nvompic' -version = '2021' - -homepage = '(none)' -description = 'NVHPC based compiler toolchain, including OpenMPI for MPI support.' 
- -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -local_compiler = ('NVHPC', '21.5-GCC-10.3.0') - -dependencies = [ - local_compiler, - ('CUDA', '11.3', '', SYSTEM), - ('OpenMPI', '4.1.1', '', local_compiler), -] - -moduleclass = 'toolchain' diff --git a/Golden_Repo/n/nvptx-tools/nvptx-tools-20180301.eb b/Golden_Repo/n/nvptx-tools/nvptx-tools-20180301.eb deleted file mode 100644 index 72a6b5e27204fb5b0de9a42b75bb0d9d7b04dc56..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nvptx-tools/nvptx-tools-20180301.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'nvptx-tools' -version = '20180301' - -homepage = 'https://github.com/MentorEmbedded/nvptx-tools' -description = """nvptx-tools: a collection of tools for use with nvptx-none GCC toolchains.""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -sources = ['5f6f343a302d620b0868edab376c00b15741e39e.zip'] -source_urls = ['https://github.com/MentorEmbedded/nvptx-tools/archive/'] - -patches = ['nvptx-tools-20180301_sm_30_to_sm_35.patch'] -checksums = [ - 'afcce591a9cae7babd5422724b9e85b45061fcc42d8ce55123225eb255f678ed', # 5f6f343a302d620b0868edab376c00b15741e39e.zip - '2d5733bed28c533f2db5eecc3670e3470ffc1dfea6789e57f9079b46f45e4f0c', # nvptx-tools-20180301_sm_30_to_sm_35.patch -] - -hidden = True - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["nvptx-none-as", "nvptx-none-ranlib", "nvptx-none-ar", "nvptx-none-ld"]], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/n/nvptx-tools/nvptx-tools-20180301_sm_30_to_sm_35.patch b/Golden_Repo/n/nvptx-tools/nvptx-tools-20180301_sm_30_to_sm_35.patch deleted file mode 100644 index 36f393aa870b444e2f3a00dd2a621ecc8de66212..0000000000000000000000000000000000000000 --- a/Golden_Repo/n/nvptx-tools/nvptx-tools-20180301_sm_30_to_sm_35.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ruN nvptx-tools-5f6f343a302d620b0868edab376c00b15741e39e.orig/nvptx-as.c nvptx-tools-5f6f343a302d620b0868edab376c00b15741e39e/nvptx-as.c ---- nvptx-tools-5f6f343a302d620b0868edab376c00b15741e39e.orig/nvptx-as.c 2018-03-01 18:31:24.000000000 +0100 -+++ nvptx-tools-5f6f343a302d620b0868edab376c00b15741e39e/nvptx-as.c 2020-07-15 23:33:06.804514239 +0200 -@@ -1047,7 +1047,7 @@ - FILE *out = stdout; - bool verbose __attribute__((unused)) = false; - int verify = -1; -- const char *smver = "sm_30"; -+ const char *smver = "sm_35"; - - int o; - int option_index = 0; diff --git a/Golden_Repo/o/OPARI2/OPARI2-2.0.5-GCCcore-9.3.0.eb b/Golden_Repo/o/OPARI2/OPARI2-2.0.5-GCCcore-9.3.0.eb deleted file mode 100644 index 94207c5567a2a24762fe449e9b24e94dd1723b08..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OPARI2/OPARI2-2.0.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = 'OPARI2' -version = '2.0.5' - -homepage = 'http://www.score-p.org' -description = """ -OPARI2, the successor of Forschungszentrum Juelich's OPARI, is a -source-to-source instrumentation tool for OpenMP and hybrid codes. 
-It surrounds OpenMP directives and runtime library calls with calls -to the POMP2 measurement interface. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/opari2/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), -] - -sanity_check_paths = { - 'files': ['bin/opari2', 'include/opari2/pomp2_lib.h'], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/o/OPARI2/OPARI2-2.0.6-GCCcore-10.3.0.eb b/Golden_Repo/o/OPARI2/OPARI2-2.0.6-GCCcore-10.3.0.eb deleted file mode 100644 index 8302c2b6aa61984fe4614da823cc62286e53ddd3..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OPARI2/OPARI2-2.0.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = 'OPARI2' -version = '2.0.6' - -homepage = 'https://www.score-p.org' -description = """ -OPARI2, the successor of Forschungszentrum Juelich's OPARI, is a -source-to-source instrumentation tool for OpenMP and hybrid codes. -It surrounds OpenMP directives and runtime library calls with calls -to the POMP2 measurement interface. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/opari2/tags/opari2-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '55972289ce66080bb48622110c3189a36e88a12917635f049b37685b9d3bbcb0', # opari2-2.0.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.36.1'), -] - -sanity_check_paths = { - 'files': ['bin/opari2', 'include/opari2/pomp2_lib.h'], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/o/OPARI2/OPARI2-2.0.6-GCCcore-9.3.0.eb b/Golden_Repo/o/OPARI2/OPARI2-2.0.6-GCCcore-9.3.0.eb deleted file mode 100644 index 80697d30f34c1ace7c9fda3dbf21d975cc5f6d16..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OPARI2/OPARI2-2.0.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'ConfigureMake' - -name = 'OPARI2' -version = '2.0.6' - -homepage = 'https://www.score-p.org' -description = """ -OPARI2, the successor of Forschungszentrum Juelich's OPARI, is a -source-to-source instrumentation tool for OpenMP and hybrid codes. -It surrounds OpenMP directives and runtime library calls with calls -to the POMP2 measurement interface. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/opari2/tags/opari2-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '55972289ce66080bb48622110c3189a36e88a12917635f049b37685b9d3bbcb0', # opari2-2.0.6.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), -] - -sanity_check_paths = { - 'files': ['bin/opari2', 'include/opari2/pomp2_lib.h'], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/o/OSPRay/OSPRay-1.8.4-gpsmpi-2020.eb b/Golden_Repo/o/OSPRay/OSPRay-1.8.4-gpsmpi-2020.eb deleted file mode 100644 index f2036c2bffa4e00f67aa81a3e5c4afc405add525..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OSPRay/OSPRay-1.8.4-gpsmpi-2020.eb +++ /dev/null @@ -1,53 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'OSPRay' -version = '1.8.4' - -homepage = 'http://www.ospray.org/' -description = """ -OSPRay is an open source, scalable, and portable ray tracing engine for -high-performance, high-fidelity visualization on Intel® Architecture CPUs. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/ospray/OSPRay/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('ispc', '1.12.0', '', SYSTEM), - ('CMake', '3.18.0'), - ('Doxygen', '1.8.18') -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('Qt5', '5.14.2'), - ('tbb', '2020.3'), - ('Embree', '3.8.0') -] - -separate_build_dir = True - -configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' -configopts += '-DOSPRAY_TASKING_SYSTEM=TBB ' -configopts += '-DOSPRAY_INSTALL_DEPENDENCIES=OFF ' -configopts += '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-Dembree_DIR=$EBROOTEMBREE/lib64/cmake/embree-3.8.0 ' - -sanity_check_paths = { - 'dirs': ['bin', 'include/ospray/ospcommon', - 'lib64/cmake/%(namelower)s-%(version)s', 'share/doc/OSPRay'], - 'files': ['bin/ospBenchmark', 'include/ospray/version.h', - 'lib64/libospray.so', - 'share/doc/OSPRay/README.md'], -} - -modextrapaths = {'CMAKE_MODULE_PATH': ['lib64/cmake/ospray-%(version)s']} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OSPRay/OSPRay-2.6.0-gpsmpi-2021.eb b/Golden_Repo/o/OSPRay/OSPRay-2.6.0-gpsmpi-2021.eb deleted file mode 100644 index 6c11529f03cbb27927d88558ec0e8fecad5ae21c..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OSPRay/OSPRay-2.6.0-gpsmpi-2021.eb +++ /dev/null @@ -1,65 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'OSPRay' -version = '2.6.0' - -homepage = 'http://www.ospray.org/' -description = """ -OSPRay is an open source, scalable, and portable ray tracing engine for -high-performance, high-fidelity visualization on Intel® Architecture CPUs. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/ospray/OSPRay/archive/'] -sources = ['v%(version)s.tar.gz'] - -checksums = ['5efccd7eff5774b77f8894e68a6b803b535a0d12f32ab49edf13b954e2848f2e'] - -builddependencies = [ - ('ispc', '1.14.1', '', SYSTEM), - ('CMake', '3.18.0', '', SYSTEM), - ('Doxygen', '1.8.18') -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('Qt5', '5.14.2'), - ('tbb', '2020.3'), - ('Embree', '3.12.2'), - ('rkcommon', '1.6.1'), - ('openvkl', '0.13.0'), - ('OpenImageDenoise', '1.2.4', '', ('gcccoremkl', '10.3.0-2021.2.0')), -] - -separate_build_dir = True - -configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' -configopts += '-DOSPRAY_TASKING_SYSTEM=OPENMP ' -configopts += '-DOSPRAY_INSTALL_DEPENDENCIES=OFF ' -configopts += '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DOSPRAY_BUILD_ISA=ALL ' -configopts += '-Dembree_DIR=$EBROOTEMBREE/lib64/cmake/embree-3.12.2 ' -configopts += '-DOSPRAY_APPS_EXAMPLES:BOOL=OFF ' -configopts += '-DOSPRAY_APPS_TESTING:BOOL=OFF ' -configopts += '-DOSPRAY_ENABLE_APPS:BOOL=OFF ' -configopts += '-DOSPRAY_MODULE_DENOISER:BOOL=ON ' -configopts += '-DOSPRAY_MODULE_MPI:BOOL=True ' -configopts += '-DOSPRAY_MPI_BUILD_TUTORIALS:BOOL=OFF ' - -sanity_check_paths = { - 'dirs': ['include/ospray/', - 'lib64/cmake/%(namelower)s-%(version)s', 'share/doc/OSPRay'], - 'files': ['include/ospray/version.h', - 'lib64/libospray.so', - 'share/doc/OSPRay/README.md'], -} - -modextrapaths = {'CMAKE_MODULE_PATH': ['lib64/cmake/ospray-%(version)s']} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OSPRay/ospcommon-1.2.0-GCC-9.3.0.eb b/Golden_Repo/o/OSPRay/ospcommon-1.2.0-GCC-9.3.0.eb deleted file mode 100644 index 3c1fddceebd91cfb7e54288764f2edf6d3593c73..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OSPRay/ospcommon-1.2.0-GCC-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ospcommon' -version = '1.2.0' - -homepage = 'http://www.ospray.org/' -description = """ -OSPRay is an open source, scalable, and portable ray tracing engine for -high-performance, high-fidelity visualization on Intel® Architecture CPUs. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -# optarch interferes with embree's build system, where -xAVX (or similar is specified) -toolchainopts = {'optarch': False, 'pic': True} - -source_urls = ['https://github.com/ospray/ospcommon/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('tbb', '2020.3'), -] - -checksums = ['62cf307d018648a78f5863b18214579097c3165e60158f67ed506805b9543756'] - -separate_build_dir = True - -start_dir = '%(name)s-%(version)s' - -sanity_check_paths = { - 'dirs': ['bin', 'include/ospcommon'], - 'files': ['lib/libospcommon.so'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OTF2/OTF2-2.2-GCCcore-9.3.0.eb b/Golden_Repo/o/OTF2/OTF2-2.2-GCCcore-9.3.0.eb deleted file mode 100644 index 6df4d8229f731104f1bce8017ec9ea88d91adc0a..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OTF2/OTF2-2.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2020 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'OTF2' -version = '2.2' - -homepage = 'http://www.score-p.org' -description = """ -The Open Trace Format 2 is a highly scalable, memory efficient event trace -data format plus support library. It is the new standard trace format for -Scalasca, Vampir, and TAU and is open for other tools. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/otf2/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'cfedf583bf000e98ce0c86e663e5ded0', # otf2-2.2.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - # SIONlib container support (optional): - ('SIONlib', '1.7.6', '-tools'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/otf2-config', 'include/otf2/otf2.h', - ('lib/libotf2.a', 'lib64/libotf2.a'), - ('lib/libotf2.%s' % SHLIB_EXT, 'lib64/libotf2.%s' % SHLIB_EXT)], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/o/OTF2/OTF2-2.3-GCCcore-10.3.0.eb b/Golden_Repo/o/OTF2/OTF2-2.3-GCCcore-10.3.0.eb deleted file mode 100644 index 16ea268cc31328bdfe99bb1918446116022f7469..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OTF2/OTF2-2.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'OTF2' -version = '2.3' - -homepage = 'https://www.score-p.org' -description = """ -The Open Trace Format 2 is a highly scalable, memory efficient event trace -data format plus support library. 
It is the new standard trace format for -Scalasca, Vampir, and TAU and is open for other tools. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/otf2/tags/otf2-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '36957428d37c40d35b6b45208f050fb5cfe23c54e874189778a24b0e9219c7e3', # otf2-2.3.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.36.1'), - # SIONlib container support (optional): - ('SIONlib', '1.7.7', '-tools'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/otf2-config', 'include/otf2/otf2.h', - ('lib/libotf2.a', 'lib64/libotf2.a'), - ('lib/libotf2.%s' % SHLIB_EXT, 'lib64/libotf2.%s' % SHLIB_EXT)], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/o/OTF2/OTF2-2.3-GCCcore-9.3.0.eb b/Golden_Repo/o/OTF2/OTF2-2.3-GCCcore-9.3.0.eb deleted file mode 100644 index cfc5b489ec8905f83e7bba0c71efeef46294a701..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OTF2/OTF2-2.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'OTF2' -version = '2.3' - -homepage = 'https://www.score-p.org' -description = """ -The Open Trace Format 2 is a highly scalable, memory efficient event trace -data format plus support library. It is the new standard trace format for -Scalasca, Vampir, and TAU and is open for other tools. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/otf2/tags/otf2-%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - '36957428d37c40d35b6b45208f050fb5cfe23c54e874189778a24b0e9219c7e3', # otf2-2.3.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), - # SIONlib container support (optional): - ('SIONlib', '1.7.6', '-tools'), -] - -configopts = '--enable-shared' - -sanity_check_paths = { - 'files': ['bin/otf2-config', 'include/otf2/otf2.h', - ('lib/libotf2.a', 'lib64/libotf2.a'), - ('lib/libotf2.%s' % SHLIB_EXT, 'lib64/libotf2.%s' % SHLIB_EXT)], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/o/Octave/Octave-5.2.0-gpsmkl-2020.eb b/Golden_Repo/o/Octave/Octave-5.2.0-gpsmkl-2020.eb deleted file mode 100644 index b6b1338fda75e0f34d9d7af7f6fd3ef687169608..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/Octave/Octave-5.2.0-gpsmkl-2020.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Octave' -version = '5.2.0' - -homepage = 'http://www.gnu.org/software/octave/' -description = """GNU Octave is a high-level interpreted language, primarily intended for numerical computations.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('Bison', '3.6.4'), - ('flex', '2.6.4'), - ('pkg-config', '0.29.2'), - ('texinfo', '6.7'), - ('gperf', '3.1'), - ('Autotools', '20200321'), -] - -dependencies = [ - ('X11', '20200222'), - ('PCRE', '8.44'), - ('ncurses', '6.2'), - ('libreadline', '8.0'), - ('ARPACK-NG', '3.7.0'), - ('cURL', '7.71.1'), - ('FLTK', '1.3.5'), - ('fontconfig', '2.13.92'), - ('freetype', '2.10.1'), - ('GLPK', '4.65'), - ('GL2PS', '1.4.2'), - ('gnuplot', '5.2.8'), - ('Java', '1.8', '', SYSTEM), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('zlib', '1.2.11'), - ('Ghostscript', '9.52'), - ('Qhull', '2020.1'), - ('Qt5', '5.14.2'), - ('HDF5', '1.10.6'), - ('qrupdate', '1.1.2'), - ('SuiteSparse', '5.7.1', '-CUDA'), - ('GraphicsMagick', '1.3.35'), - ('FFTW', '3.3.8'), - ('texlive', '20200406'), -] - -configopts = 'MOC=$EBROOTQT5/bin/moc ' -configopts += 'UIC=$EBROOTQT5/bin/uic ' -configopts += 'RCC=$EBROOTQT5/bin/rcc ' -configopts += 'LRELEASE=$EBROOTQT5/bin/lrelease ' -configopts += '--with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK" --disable-docs ' -# correct for both GCC and Intel compilers -configopts += '--enable-fortran-calling-convention=gfortran ' - -sanity_check_paths = { - 'files': ['bin/octave'], - 'dirs': [] -} - -sanity_check_commands = [('octave-cli', '--eval "1+2"')] - -moduleclass = 'math' diff --git a/Golden_Repo/o/Octave/Octave-6.1.0-gcccoremkl-10.3.0-2021.2.0-nompi.eb b/Golden_Repo/o/Octave/Octave-6.1.0-gcccoremkl-10.3.0-2021.2.0-nompi.eb deleted file mode 100644 index f3764e426eb138f52e2dbebfabdc56e93bbac2f1..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/Octave/Octave-6.1.0-gcccoremkl-10.3.0-2021.2.0-nompi.eb +++ /dev/null @@ -1,71 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Octave' -version = '6.1.0' -versionsuffix = '-nompi' - -homepage = 'http://www.gnu.org/software/octave/' -description = """GNU Octave is a high-level interpreted language, primarily intended for numerical computations.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = 
{'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Bison', '3.7.6'), - ('flex', '2.6.4'), - ('pkg-config', '0.29.2'), - ('texinfo', '6.7'), - ('gperf', '3.1'), - ('Autotools', '20200321'), -] - -dependencies = [ - ('X11', '20200222'), - ('PCRE', '8.44'), - ('ncurses', '6.2'), - ('libreadline', '8.0'), - ('ARPACK-NG', '3.8.0', '-nompi'), - ('cURL', '7.71.1'), - ('FLTK', '1.3.5'), - ('fontconfig', '2.13.92'), - ('freetype', '2.10.1'), - ('GLPK', '4.65'), - ('GL2PS', '1.4.2'), - ('gnuplot', '5.2.8'), - ('Java', '15', '', SYSTEM), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('zlib', '1.2.11'), - ('Ghostscript', '9.52'), - ('Qhull', '2020.1'), - ('Qt5', '5.14.2'), - ('HDF5', '1.10.6', '-serial'), - ('qrupdate', '1.1.2'), - ('GraphicsMagick', '1.3.35'), - ('FFTW', '3.3.8', '-nompi'), - ('texlive', '20200406'), -] - -maxparallel = 12 - -configopts = 'MOC=$EBROOTQT5/bin/moc ' -configopts += 'UIC=$EBROOTQT5/bin/uic ' -configopts += 'RCC=$EBROOTQT5/bin/rcc ' -configopts += 'LRELEASE=$EBROOTQT5/bin/lrelease ' -configopts += '--with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK" --disable-docs ' -# correct for both GCC and Intel compilers -configopts += '--enable-fortran-calling-convention=gfortran ' - -sanity_check_paths = { - 'files': ['bin/octave'], - 'dirs': [] -} - -sanity_check_commands = [('octave-cli', '--eval "1+2"')] - -moduleclass = 'math' diff --git a/Golden_Repo/o/Octave/Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-nompi.eb b/Golden_Repo/o/Octave/Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-nompi.eb deleted file mode 100644 index badffaf060aef7e4c80c3c4127e8e976f2dd6d08..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/Octave/Octave-6.1.0-gcccoremkl-9.3.0-2020.2.254-nompi.eb +++ /dev/null @@ -1,71 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Octave' -version = '6.1.0' -versionsuffix = '-nompi' - -homepage = 'http://www.gnu.org/software/octave/' -description = """GNU Octave is a high-level interpreted language, primarily intended for numerical computations.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), - ('Bison', '3.6.4'), - ('flex', '2.6.4'), - ('pkg-config', '0.29.2'), - ('texinfo', '6.7'), - ('gperf', '3.1'), - ('Autotools', '20200321'), -] - -dependencies = [ - ('X11', '20200222'), - ('PCRE', '8.44'), - ('ncurses', '6.2'), - ('libreadline', '8.0'), - ('ARPACK-NG', '3.7.0', '-nompi'), - ('cURL', '7.71.1'), - ('FLTK', '1.3.5'), - ('fontconfig', '2.13.92'), - ('freetype', '2.10.1'), - ('GLPK', '4.65'), - ('GL2PS', '1.4.2'), - ('gnuplot', '5.2.8'), - ('Java', '15', '', SYSTEM), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('zlib', '1.2.11'), - ('Ghostscript', '9.52'), - ('Qhull', '2020.1'), - ('Qt5', '5.14.2'), - ('HDF5', '1.10.6', '-serial'), - ('qrupdate', '1.1.2'), - ('GraphicsMagick', '1.3.35'), - ('FFTW', '3.3.8', '-nompi'), - ('texlive', '20200406'), -] - -maxparallel = 12 - -configopts = 'MOC=$EBROOTQT5/bin/moc ' -configopts += 'UIC=$EBROOTQT5/bin/uic ' -configopts += 'RCC=$EBROOTQT5/bin/rcc ' -configopts += 'LRELEASE=$EBROOTQT5/bin/lrelease ' -configopts += '--with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK" --disable-docs ' -# correct for both GCC and Intel compilers -configopts += '--enable-fortran-calling-convention=gfortran ' - -sanity_check_paths = { - 'files': 
['bin/octave'], - 'dirs': [] -} - -sanity_check_commands = [('octave-cli', '--eval "1+2"')] - -moduleclass = 'math' diff --git a/Golden_Repo/o/Octave/Octave-6.1.0-gpsmkl-2020.eb b/Golden_Repo/o/Octave/Octave-6.1.0-gpsmkl-2020.eb deleted file mode 100644 index 5d9b0aa5f49a3b83f4f976bae098fac37c92833c..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/Octave/Octave-6.1.0-gpsmkl-2020.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Octave' -version = '6.1.0' - -homepage = 'http://www.gnu.org/software/octave/' -description = """GNU Octave is a high-level interpreted language, primarily intended for numerical computations.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('Bison', '3.6.4'), - ('flex', '2.6.4'), - ('pkg-config', '0.29.2'), - ('texinfo', '6.7'), - ('gperf', '3.1'), - ('Autotools', '20200321'), -] - -dependencies = [ - ('X11', '20200222'), - ('PCRE', '8.44'), - ('ncurses', '6.2'), - ('libreadline', '8.0'), - ('ARPACK-NG', '3.7.0'), - ('cURL', '7.71.1'), - ('FLTK', '1.3.5'), - ('fontconfig', '2.13.92'), - ('freetype', '2.10.1'), - ('GLPK', '4.65'), - ('GL2PS', '1.4.2'), - ('gnuplot', '5.2.8'), - ('Java', '15', '', SYSTEM), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('zlib', '1.2.11'), - ('Ghostscript', '9.52'), - ('Qhull', '2020.1'), - ('Qt5', '5.14.2'), - ('HDF5', '1.10.6'), - ('qrupdate', '1.1.2'), - ('SuiteSparse', '5.7.1', '-CUDA'), - ('GraphicsMagick', '1.3.35'), - ('FFTW', '3.3.8'), - ('texlive', '20200406'), -] - -configopts = 'MOC=$EBROOTQT5/bin/moc ' -configopts += 'UIC=$EBROOTQT5/bin/uic ' -configopts += 'RCC=$EBROOTQT5/bin/rcc ' -configopts += 'LRELEASE=$EBROOTQT5/bin/lrelease ' -configopts += '--with-blas="$LIBBLAS" --with-lapack="$LIBLAPACK" --disable-docs ' -# correct for both GCC and Intel compilers -configopts += '--enable-fortran-calling-convention=gfortran ' - -sanity_check_paths = { - 'files': ['bin/octave'], - 'dirs': [] -} - -sanity_check_commands = [('octave-cli', '--eval "1+2"')] - -moduleclass = 'math' diff --git a/Golden_Repo/o/OpenAI-Gym/OpenAI-Gym-0.18.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/o/OpenAI-Gym/OpenAI-Gym-0.18.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index d0f2aaf3150cf3807f4f9bb2593fc7a7b9c4f607..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenAI-Gym/OpenAI-Gym-0.18.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,63 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'OpenAI-Gym' -version = '0.18.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://gym.openai.com' -description = "A toolkit for developing and comparing reinforcement learning algorithms." 
- -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('CMake', '3.18.0'), - ('SWIG', '4.0.2', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), # for numpy, scipy - ('Pillow-SIMD', '7.0.0.post3', versionsuffix), - ('OpenCV', '4.5.0', versionsuffix), - ('scikit', '2020', versionsuffix), - ('XServer', '1.20.9'), -] - -use_pip = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('EasyProcess', '0.3', { - }), - ('PyVirtualDisplay', '2.0', { - }), - ('pyglet', '1.4.10', { - 'source_tmpl': 'pyglet-%(version)s.zip', - 'checksums': ['c57e3e18246f45e4d6bb3d29e39d128d6e72b05f4212b10353adc3ba260ceb65'], - }), - ('atari-py', '0.2.6', { - 'checksums': ['6249ad5079b0489e87eb44e65485bb1b07cc1b5af729f1ee52ece749503ceb1d'], - }), - ('box2d-py', '2.3.8', { - 'modulename': 'Box2D', - 'checksums': ['bdacfbbc56079bb317548efe49d3d5a86646885cc27f4a2ee97e4b2960921ab7'], - }), - ('gym', version, { - # can't use 'all', because 'mujoco' and 'robotics' extras require MuJoCo (which is not freely available); - # see https://github.com/openai/mujoco-py#install-mujoco - 'use_pip_extras': 'atari,box2d,classic_control', - 'checksums': ['a0dcd25c1373f3938f4cb4565f74f434fba6faefb73a42d09c9dddd0c08af53e'], - 'patches': ['video_recorder.patch'], - }), -] - -local_envs = ['algorithmic', 'atari', 'box2d', 'classic_control', 'toy_text'] -sanity_check_commands = ["python -c 'import gym.envs.%s'" % e for e in local_envs] - -sanity_pip_check = True - -moduleclass = 'tools' diff --git a/Golden_Repo/o/OpenAI-Gym/video_recorder.patch b/Golden_Repo/o/OpenAI-Gym/video_recorder.patch deleted file mode 100644 index 39fdaeda124c9805b513bb00dc5d3ce962b2a8a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenAI-Gym/video_recorder.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- gym/gym-0.18.0/gym/wrappers/monitoring/video_recorder.py.orig 2021-02-15 16:53:44.341866890 +0100 -+++ gym/gym-0.18.0/gym/wrappers/monitoring/video_recorder.py 2021-02-15 16:54:31.089455521 +0100 -@@ -300,7 +300,7 @@ - if frame.dtype != np.uint8: - raise error.InvalidFrame("Your frame has data type {}, but we require uint8 (i.e. RGB values from 0-255).".format(frame.dtype)) - -- self.proc.stdin.write(frame.tobytes()) -+ self.proc.stdin.write(frame.tobytes()) - - def close(self): - self.proc.stdin.close() diff --git a/Golden_Repo/o/OpenCV/OpenCV-4.5.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/o/OpenCV/OpenCV-4.5.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 55f91ea4d783de02e63638793c1539353274a39e..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenCV/OpenCV-4.5.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,77 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'OpenCV' -version = '4.5.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://opencv.org/' -description = """OpenCV (Open Source Computer Vision Library) is an open source computer vision - and machine learning software library. 
OpenCV was built to provide - a common infrastructure for computer vision applications and to accelerate - the use of machine perception in the commercial products.""" - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True, 'optarch': True} - -source_urls = [ - 'https://github.com/opencv/opencv/archive/', -] - -sources = [ - '%(version)s.zip', -] - -checksums = [ - '168f6e61d8462fb3d5a29ba0d19c0375c111125cac753ad01035a359584ccde9' -] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), - ('zlib', '1.2.11'), - ('FFmpeg', '4.3.1'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('JasPer', '2.0.19'), - ('GLib', '2.64.4'), - ('GTK+', '3.24.17',), - ('protobuf', '3.13.0'), - ('Eigen', '3.3.7'), - ('OpenEXR', '2.5.2'), - ('freetype', '2.10.1'), - ('OpenGL', '2020'), -] - -separate_build_dir = True - -configopts = "-D CMAKE_BUILD_TYPE=RELEASE " -configopts += '-D CMAKE_CXX_FLAGS="-Wdeprecated-declarations" ' - -configopts += "-D OPENCV_GENERATE_PKGCONFIG=ON " -configopts += "-D ENABLE_PRECOMPILED_HEADERS=OFF " - -configopts += "-D BUILD_EXAMPLES=ON " -configopts += "-D INSTALL_PYTHON_EXAMPLES=ON " - -configopts += "-D Protobuf_INCLUDE_DIR=$EBROOTPROTOBUF/include " -configopts += "-D Protobuf_LIBRARY=$EBROOTPROTOBUF/lib64/libprotobuf.so " -configopts += "-D Protobuf_LITE_LIBRARY_RELEASE=$EBROOTPROTOBUF/lib64/libprotobuf-lite.so " -configopts += "-D Protobuf_PROTOC_LIBRARY_RELEASE=$EBROOTPROTOBUF/lib64/bprotoc.so " -configopts += "-D BUILD_PROTOBUF=OFF -DPROTOBUF_UPDATE_FILES=ON " - -configopts += "-D BUILD_JAVA=OFF " - -configopts += "-D PYTHON_DEFAULT_EXECUTABLE=$EBROOTPYTHON/bin/python3 " -configopts += "-D PYTHON2_EXECUTABLE='' " # ensure python2 is NOT used - -configopts += "-D WITH_OPENMP=ON " - -modextrapaths = {'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages/'} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OpenCV/OpenCV-4.5.3-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/o/OpenCV/OpenCV-4.5.3-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index 7dea9a3290b16828d68b0bfe9a442bf49ad94dd7..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenCV/OpenCV-4.5.3-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,79 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'OpenCV' -version = '4.5.3' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://opencv.org/' -description = """OpenCV (Open Source Computer Vision Library) is an open source computer vision - and machine learning software library. 
OpenCV was built to provide - a common infrastructure for computer vision applications and to accelerate - the use of machine perception in the commercial products.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True, 'optarch': True} - -source_urls = [ - 'https://github.com/opencv/opencv/archive/', -] - -sources = [ - '%(version)s.zip', -] - -checksums = [ - 'a61e7a4618d353140c857f25843f39b2abe5f451b018aab1604ef0bc34cd23d5' -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), - ('zlib', '1.2.11'), - ('FFmpeg', '4.4'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0'), - ('JasPer', '2.0.19'), - ('GLib', '2.64.4'), - ('GTK+', '3.24.17',), - ('protobuf', '3.13.0'), - ('Eigen', '3.3.7'), - ('OpenEXR', '2.5.2'), - ('freetype', '2.10.1'), - ('OpenGL', '2020'), -] - -separate_build_dir = True - -configopts = "-D CMAKE_BUILD_TYPE=RELEASE " -configopts += '-D CMAKE_CXX_FLAGS="-Wdeprecated-declarations" ' - -configopts += "-D OPENCV_GENERATE_PKGCONFIG=ON " -configopts += "-D ENABLE_PRECOMPILED_HEADERS=OFF " - -configopts += "-D BUILD_EXAMPLES=ON " -configopts += "-D INSTALL_PYTHON_EXAMPLES=ON " - -configopts += "-D Protobuf_INCLUDE_DIR=$EBROOTPROTOBUF/include " -configopts += "-D Protobuf_LIBRARY=$EBROOTPROTOBUF/lib64/libprotobuf.so " -configopts += "-D Protobuf_LITE_LIBRARY_RELEASE=$EBROOTPROTOBUF/lib64/libprotobuf-lite.so " -configopts += "-D Protobuf_PROTOC_LIBRARY_RELEASE=$EBROOTPROTOBUF/lib64/bprotoc.so " -configopts += "-D BUILD_PROTOBUF=OFF -DPROTOBUF_UPDATE_FILES=ON " - -configopts += "-D BUILD_JAVA=OFF " - -configopts += "-D PYTHON_DEFAULT_EXECUTABLE=$EBROOTPYTHON/bin/python3 " -configopts += "-D PYTHON2_EXECUTABLE='' " # ensure python2 is NOT used - -configopts += "-D WITH_OPENMP=ON " - -modextrapaths = {'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages/'} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OpenEXR/OpenEXR-2.5.2-GCCcore-10.3.0.eb b/Golden_Repo/o/OpenEXR/OpenEXR-2.5.2-GCCcore-10.3.0.eb deleted file mode 100644 index e01e3b2b953c7d6b605bb63659ebacf873d996b7..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenEXR/OpenEXR-2.5.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'OpenEXR' -version = '2.5.2' - -homepage = 'https://www.openexr.com/' -description = """OpenEXR is a high dynamic-range (HDR) image file format developed by Industrial Light & Magic - for use in computer imaging applications""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/openexr/openexr/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['5da8dff448d0c4a529e52c97daf238a461d01cd233944f75095668d6d7528761'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -configopts = '-DOPENEXR_BUILD_PYTHON_LIBS=OFF' - -sanity_check_paths = { - 'files': ['lib/lib%s-%%(version_major)s_%%(version_minor)s.%s' % (x, SHLIB_EXT) for x in - ['Half', 'Iex', 'IexMath', 'IlmImf', 'IlmImfUtil', 'IlmThread', 'Imath']] + - ['bin/exr%s' % x for x in - ['envmap', 'header', 'makepreview', 'maketiled', 'multipart', 'multiview', 'stdattr']], - 'dirs': ['include/OpenEXR', 'share'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OpenEXR/OpenEXR-2.5.2-GCCcore-9.3.0.eb 
b/Golden_Repo/o/OpenEXR/OpenEXR-2.5.2-GCCcore-9.3.0.eb deleted file mode 100644 index c985a2d97033317aaecc951948f42aeaf5a9c612..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenEXR/OpenEXR-2.5.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'OpenEXR' -version = '2.5.2' - -homepage = 'https://www.openexr.com/' -description = """OpenEXR is a high dynamic-range (HDR) image file format developed by Industrial Light & Magic - for use in computer imaging applications""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/openexr/openexr/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['5da8dff448d0c4a529e52c97daf238a461d01cd233944f75095668d6d7528761'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -configopts = '-DOPENEXR_BUILD_PYTHON_LIBS=OFF' - -sanity_check_paths = { - 'files': ['lib/lib%s-%%(version_major)s_%%(version_minor)s.%s' % (x, SHLIB_EXT) for x in - ['Half', 'Iex', 'IexMath', 'IlmImf', 'IlmImfUtil', 'IlmThread', 'Imath']] + - ['bin/exr%s' % x for x in - ['envmap', 'header', 'makepreview', 'maketiled', 'multipart', 'multiview', 'stdattr']], - 'dirs': ['include/OpenEXR', 'share'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OpenFOAM/OpenFOAM-6-SLURM.patch b/Golden_Repo/o/OpenFOAM/OpenFOAM-6-SLURM.patch deleted file mode 100644 index d1279153e18bea944d4f82b9fe165c4213ea5a22..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenFOAM/OpenFOAM-6-SLURM.patch +++ /dev/null @@ -1,33 +0,0 @@ -# Change the mpirun command to "srun". -# As of writing, only the srun is supported at JSC supercomputers. -# Author: Metin Cakircali <m.cakircali@fz-juelich.de> - -diff -ru OpenFOAM-6-version-6_orig/bin/foamJob OpenFOAM-6-version-6/bin/foamJob ---- OpenFOAM-6-version-6_orig/bin/foamJob 2018-10-17 11:22:43.363505000 +0200 -+++ OpenFOAM-6-version-6/bin/foamJob 2018-10-17 12:12:27.565678000 +0200 -@@ -195,8 +195,8 @@ - # - # Find mpirun - # -- mpirun=$(findExec mpirun) || error "'mpirun' not found" -- mpiopts="-np $NPROCS" -+ mpirun=$(findExec srun) || error "'srun' not found" -+ mpiopts="-n $NPROCS" - - # - # Check if the machine ready to run parallel -diff -ru OpenFOAM-6-version-6_orig/bin/tools/RunFunctions OpenFOAM-6-version-6/bin/tools/RunFunctions ---- OpenFOAM-6-version-6_orig/bin/tools/RunFunctions 2018-10-17 11:22:43.319389000 +0200 -+++ OpenFOAM-6-version-6/bin/tools/RunFunctions 2018-10-17 11:24:49.276331000 +0200 -@@ -139,9 +139,9 @@ - else - echo "Running $APP_RUN in parallel on $PWD using $nProcs processes" - if [ "$LOG_APPEND" = "true" ]; then -- ( mpirun -np $nProcs $APP_RUN -parallel "$@" < /dev/null >> log.$LOG_SUFFIX 2>&1 ) -+ ( srun -n $nProcs $APP_RUN -parallel "$@" < /dev/null >> log.$LOG_SUFFIX 2>&1 ) - else -- ( mpirun -np $nProcs $APP_RUN -parallel "$@" < /dev/null > log.$LOG_SUFFIX 2>&1 ) -+ ( srun -n $nProcs $APP_RUN -parallel "$@" < /dev/null > log.$LOG_SUFFIX 2>&1 ) - fi - fi - } diff --git a/Golden_Repo/o/OpenFOAM/OpenFOAM-6-cleanup.patch b/Golden_Repo/o/OpenFOAM/OpenFOAM-6-cleanup.patch deleted file mode 100644 index e8be7a70b145802e7cb694b51a8bcfb215954fb0..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenFOAM/OpenFOAM-6-cleanup.patch +++ /dev/null @@ -1,230 +0,0 @@ -# This patch removes all need for the ThirdParty files of OpenFOAM: -# we use EB dependencies for everything. It adjusts the paths, variables, etc -# We also let the install dir, compiler, etc be set by EB. 
-# Lastly, we also fix a small compile issue in 'ptscotchDecomp.C' -# by Kenneth Hoste (HPC-UGent), based on patch for OpenFOAM 4.1 by Ward Poelmans <wpoely86@gmail.com> -diff -ru OpenFOAM-6-version-6_orig/applications/utilities/mesh/manipulation/setSet/Allwmake OpenFOAM-6-version-6/applications/utilities/mesh/manipulation/setSet/Allwmake ---- OpenFOAM-6-version-6_orig/applications/utilities/mesh/manipulation/setSet/Allwmake 2018-10-17 11:22:35.628646000 +0200 -+++ OpenFOAM-6-version-6/applications/utilities/mesh/manipulation/setSet/Allwmake 2018-10-17 13:32:16.457222000 +0200 -@@ -7,18 +7,18 @@ - unset COMP_FLAGS LINK_FLAGS - - # Use readline if available --if [ -f /usr/include/readline/readline.h ] -+if [ -f $EBROOTLIBREADLINE/include/readline/readline.h ] - then - echo " found <readline/readline.h> -- enabling readline support." - export COMP_FLAGS="-DHAS_READLINE" - - # readline may require ncurses -- if [ -f /usr/include/ncurses/ncurses.h ] -+ if [ -f $EBROOTNCURSES/include/ncurses/ncurses.h ] - then - echo " found <ncurses/ncurses.h> -- maybe required by readline." -- export LINK_FLAGS="-lreadline -lncurses" -+ export LINK_FLAGS="-L$EBROOTLIBREADLINE/lib -lreadline -L$EBROOTNCURSES -lncurses" - else -- export LINK_FLAGS="-lreadline" -+ export LINK_FLAGS="-L$EBROOTLIBREADLINE/lib -lreadline" - fi - fi - -diff -ru OpenFOAM-6-version-6_orig/applications/utilities/postProcessing/graphics/PVReaders/Allwmake OpenFOAM-6-version-6/applications/utilities/postProcessing/graphics/PVReaders/Allwmake ---- OpenFOAM-6-version-6_orig/applications/utilities/postProcessing/graphics/PVReaders/Allwmake 2018-10-17 11:22:38.725154000 +0200 -+++ OpenFOAM-6-version-6/applications/utilities/postProcessing/graphics/PVReaders/Allwmake 2018-10-17 13:54:51.187898000 +0200 -@@ -14,8 +14,8 @@ - } - - # ensure CMake gets the correct C/C++ compilers -- [ -n "$WM_CC" ] && export CC="$WM_CC" -- [ -n "$WM_CXX" ] && export CXX="$WM_CXX" -+# [ -n "$WM_CC" ] && export CC="$WM_CC" -+# [ -n "$WM_CXX" ] && export CXX="$WM_CXX" - - wmake $targetType vtkPVblockMesh - wmake $targetType vtkPVFoam -diff -ru OpenFOAM-6-version-6_orig/etc/bashrc OpenFOAM-6-version-6/etc/bashrc ---- OpenFOAM-6-version-6_orig/etc/bashrc 2018-10-17 11:22:17.114079000 +0200 -+++ OpenFOAM-6-version-6/etc/bashrc 2018-10-17 13:27:12.688574823 +0200 -@@ -43,8 +43,8 @@ - # Please set to the appropriate path if the default is not correct. - # - [ "$BASH" -o "$ZSH_NAME" ] && \ --export FOAM_INST_DIR=$(cd $(dirname ${BASH_SOURCE:-$0})/../.. && pwd -P) || \ --export FOAM_INST_DIR=$HOME/$WM_PROJECT -+#export FOAM_INST_DIR=$(cd $(dirname ${BASH_SOURCE:-$0})/../.. 
&& pwd -P) || \ -+#export FOAM_INST_DIR=$HOME/$WM_PROJECT - # export FOAM_INST_DIR=~$WM_PROJECT - # export FOAM_INST_DIR=/opt/$WM_PROJECT - # export FOAM_INST_DIR=/usr/local/$WM_PROJECT -diff -ru OpenFOAM-6-version-6_orig/etc/config.sh/CGAL OpenFOAM-6-version-6/etc/config.sh/CGAL ---- OpenFOAM-6-version-6_orig/etc/config.sh/CGAL 2018-10-17 11:22:17.102310000 +0200 -+++ OpenFOAM-6-version-6/etc/config.sh/CGAL 2018-10-17 13:35:41.440894000 +0200 -@@ -35,33 +35,8 @@ - # - #------------------------------------------------------------------------------ - --boost_version=boost-system --cgal_version=cgal-system --#cgal_version=CGAL-4.10 -- --thirdPartyPath=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER -- --if [ "$boost_version" != "boost-system" ] --then -- export BOOST_ARCH_PATH=$thirdPartyPath/$boost_version -- if [ -d "$BOOST_ARCH_PATH" ] -- then -- _foamAddLib $BOOST_ARCH_PATH/lib -- fi --else -- unset BOOST_ARCH_PATH --fi -- --if [ "$cgal_version" != "cgal-system" ] --then -- export CGAL_ARCH_PATH=$thirdPartyPath/$cgal_version -- if [ -d "$CGAL_ARCH_PATH" ] -- then -- _foamAddLib $CGAL_ARCH_PATH/lib$WM_COMPILER_LIB_ARCH -- fi --else -- unset CGAL_ARCH_PATH --fi -+export CGAL_ARCH_PATH=$EBROOTCGAL -+export BOOST_ARCH_PATH=$EBROOTBOOST - - if [ "$FOAM_VERBOSE" -a "$PS1" ] - then -@@ -70,6 +45,4 @@ - echo " $cgal_version at $CGAL_ARCH_PATH" 1>&2 - fi - --unset thirdPartyPath -- - #------------------------------------------------------------------------------ -diff -ru OpenFOAM-6-version-6_orig/etc/config.sh/gperftools OpenFOAM-6-version-6/etc/config.sh/gperftools ---- OpenFOAM-6-version-6_orig/etc/config.sh/gperftools 2018-10-17 11:22:17.090970000 +0200 -+++ OpenFOAM-6-version-6/etc/config.sh/gperftools 2018-10-17 13:54:47.576302000 +0200 -@@ -29,13 +29,7 @@ - # - #------------------------------------------------------------------------------ - --version=svn --gperftools_install=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER -- --GPERFTOOLS_VERSION=gperftools-$version --GPERFTOOLS_ARCH_PATH=$gperftools_install/$GPERFTOOLS_VERSION -- --export PATH=$GPERFTOOLS_ARCH_PATH/bin:$PATH --export LD_LIBRARY_PATH=$GPERFTOOLS_ARCH_PATH/lib:$LD_LIBRARY_PATH -+GPERFTOOLS_VERSION=gperftools-$EBVERSIONGPERFTOOLS -+GPERFTOOLS_ARCH_PATH=$EBROOTGPERFTOOLS - - #------------------------------------------------------------------------------ -diff -ru OpenFOAM-6-version-6_orig/etc/config.sh/metis OpenFOAM-6-version-6/etc/config.sh/metis ---- OpenFOAM-6-version-6_orig/etc/config.sh/metis 2018-10-17 11:22:17.089913000 +0200 -+++ OpenFOAM-6-version-6/etc/config.sh/metis 2018-10-17 13:54:53.260727000 +0200 -@@ -34,7 +34,7 @@ - # - #------------------------------------------------------------------------------ - --export METIS_VERSION=metis-5.1.0 --export METIS_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION/$METIS_VERSION -+export METIS_VERSION=metis-$EBVERSIONMETIS -+export METIS_ARCH_PATH=$EBROOTMETIS - - #------------------------------------------------------------------------------ -diff -ru OpenFOAM-6-version-6_orig/etc/config.sh/mpi OpenFOAM-6-version-6/etc/config.sh/mpi ---- OpenFOAM-6-version-6_orig/etc/config.sh/mpi 2017-07-25 18:43:40.000000000 +0200 -+++ OpenFOAM-6-version-6/etc/config.sh/mpi 2018-09-18 10:51:25.164160755 +0200 -@@ -249,6 +249,9 @@ - _foamAddPath $MPI_ARCH_PATH/bin64 - _foamAddLib $MPI_ARCH_PATH/lib64 - ;; -+EASYBUILDMPI) -+ export FOAM_MPI=mpi -+ ;; - *) - export FOAM_MPI=dummy - ;; -diff -ru 
OpenFOAM-6-version-6_orig/etc/config.sh/scotch OpenFOAM-6-version-6/etc/config.sh/scotch ---- OpenFOAM-6-version-6_orig/etc/config.sh/scotch 2018-10-17 11:22:17.074307000 +0200 -+++ OpenFOAM-6-version-6/etc/config.sh/scotch 2018-10-17 13:54:54.076619000 +0200 -@@ -37,7 +37,7 @@ - # - #------------------------------------------------------------------------------ - --export SCOTCH_VERSION=scotch_6.0.6 --export SCOTCH_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION/$SCOTCH_VERSION -+export SCOTCH_VERSION=scotch_$EBVERSIONSCOTCH -+export SCOTCH_ARCH_PATH=$EBROOTSCOTCH - - #------------------------------------------------------------------------------ -diff -ru OpenFOAM-6-version-6_orig/etc/config.sh/settings OpenFOAM-6-version-6/etc/config.sh/settings ---- OpenFOAM-6-version-6_orig/etc/config.sh/settings 2018-10-17 11:22:17.081439000 +0200 -+++ OpenFOAM-6-version-6/etc/config.sh/settings 2018-10-17 13:54:56.013405000 +0200 -@@ -61,11 +61,11 @@ - 64) - WM_ARCH=linux64 - export WM_COMPILER_LIB_ARCH=64 -- export WM_CC='gcc' -- export WM_CXX='g++' -- export WM_CFLAGS='-m64 -fPIC' -- export WM_CXXFLAGS='-m64 -fPIC -std=c++0x' -- export WM_LDFLAGS='-m64' -+ export WM_CC=$CC -+ export WM_CXX=$CXX -+ export WM_CFLAGS=$CFLAGS -+ export WM_CXXFLAGS=$CXXFLAGS -+ export WM_LDFLAGS=$LDFLAGS - ;; - *) - echo "Unknown WM_ARCH_OPTION '$WM_ARCH_OPTION', should be 32 or 64"\ -diff -ru OpenFOAM-6-version-6_orig/src/parallel/decompose/ptscotchDecomp/Make/options OpenFOAM-6-version-6/src/parallel/decompose/ptscotchDecomp/Make/options ---- OpenFOAM-6-version-6_orig/src/parallel/decompose/ptscotchDecomp/Make/options 2018-10-17 11:23:46.338261000 +0200 -+++ OpenFOAM-6-version-6/src/parallel/decompose/ptscotchDecomp/Make/options 2018-10-17 13:47:29.990948000 +0200 -@@ -4,9 +4,6 @@ - EXE_INC = \ - $(PFLAGS) $(PINC) \ - -I$(FOAM_SRC)/Pstream/mpi/lnInclude \ -- -I$(SCOTCH_ARCH_PATH)/include/$(FOAM_MPI) \ -- -I$(SCOTCH_ARCH_PATH)/include \ -- -I/usr/include/scotch \ - -I../decompositionMethods/lnInclude - - LIB_LIBS = \ -@@ -14,5 +11,5 @@ - -L$(FOAM_EXT_LIBBIN)/$(FOAM_MPI) \ - -lptscotch \ - -lptscotcherrexit \ -- -lscotch \ -+ -lscotch ${LINK_FLAGS} \ - -lrt -diff -ru OpenFOAM-6-version-6_orig/src/parallel/decompose/ptscotchDecomp/ptscotchDecomp.C OpenFOAM-6-version-6/src/parallel/decompose/ptscotchDecomp/ptscotchDecomp.C ---- OpenFOAM-6-version-6_orig/src/parallel/decompose/ptscotchDecomp/ptscotchDecomp.C 2018-10-17 11:23:46.321728000 +0200 -+++ OpenFOAM-6-version-6/src/parallel/decompose/ptscotchDecomp/ptscotchDecomp.C 2018-10-17 13:53:18.650845484 +0200 -@@ -31,10 +31,11 @@ - #include "SubField.H" - #include "PstreamGlobals.H" - -+#include <mpi.h> -+ - extern "C" - { - #include <stdio.h> -- #include <mpi.h> - #include "ptscotch.h" - } - -diff -ru OpenFOAM-6-version-6_orig/src/parallel/decompose/scotchDecomp/Make/options OpenFOAM-6-version-6/src/parallel/decompose/scotchDecomp/Make/options ---- OpenFOAM-6-version-6_orig/src/parallel/decompose/scotchDecomp/Make/options 2018-10-17 11:23:46.284785000 +0200 -+++ OpenFOAM-6-version-6/src/parallel/decompose/scotchDecomp/Make/options 2018-10-17 13:55:00.667330000 +0200 -@@ -8,7 +8,6 @@ - EXE_INC = \ - $(PFLAGS) $(PINC) \ - -I$(SCOTCH_ARCH_PATH)/include \ -- -I/usr/include/scotch \ - -I../decompositionMethods/lnInclude - - LIB_LIBS = \ diff --git a/Golden_Repo/o/OpenFOAM/OpenFOAM-6-gpsmpi-2020.eb b/Golden_Repo/o/OpenFOAM/OpenFOAM-6-gpsmpi-2020.eb deleted file mode 100644 index 
b841de7639a35e578138939f39d562d8995b4939..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenFOAM/OpenFOAM-6-gpsmpi-2020.eb +++ /dev/null @@ -1,64 +0,0 @@ -name = 'OpenFOAM' -version = '6' - -homepage = 'http://www.openfoam.org/' -description = """OpenFOAM is a free, open source CFD software package. - OpenFOAM has an extensive range of features to solve anything from complex fluid flows - involving chemical reactions, turbulence and heat transfer, - to solid dynamics and electromagnetics. - -OpenFOAM %(version)s%(versionsuffix)s is installed in -$EBROOTOPENFOAM -and the corresponding module -OpenFOAM/%(version)s%(versionsuffix)s -provides settings to run its solvers and utilities. - -The full OpenFOAM environment can be initiated via -. $FOAM_BASH -""" -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'cstd': 'c++11'} - -# source_urls = ['https://github.com/OpenFOAM/OpenFOAM-%(version_major)s/archive'] - -sources = ['OpenFOAM-%(version)s-master.zip'] -patches = [ - 'OpenFOAM-%(version)s-cleanup.patch', - 'OpenFOAM-%(version)s-SLURM.patch' -] -checksums = [ - '1fabeca52d307e5cac1ab3785ed0b6888a6e13e21537e860519aad9168bca8ea', # OpenFOAM-6-master.zip - '450e6ddbfda9413cf67a66b5c585b80c24cfb716fa9bf0810830ea7fa2e93bf6', # OpenFOAM-6.0-cleanup.patch - '0c97373dd13403b0c3eee7e13118ae0bfe11a89bb0379be9e39c318e27c43a97' # OpenFOAM-6.0-SLURM.patch -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Bison', '3.6.4'), - ('flex', '2.6.4'), -] - -dependencies = [ - ('libreadline', '8.0'), - ('ncurses', '6.2'), - # OpenFOAM requires 64 bit METIS using 32 bit indexes (array indexes) - ('METIS', '5.1.0'), - ('SCOTCH', '6.1.0'), - ('CGAL', '4.13.1', '-Python-3.8.5'), - # ('ParaView', '5.6.0', '-Python-3.6.8') -] - -parallel = 8 - -prebuildopts = "export FOAMY_HEX_MESH=1 && " - - -modextravars = { - 'MPI_BUFFER_SIZE': '40000000' -} - -modloadmsg = "This module provides only a minimal environment to run %(name)s executables.\n" -modloadmsg += "The full environment can be initiated via:\n" -modloadmsg += ". $FOAM_BASH\n" - -moduleclass = 'cae' diff --git a/Golden_Repo/o/OpenFOAM/OpenFOAM-v1812-cleanup.patch b/Golden_Repo/o/OpenFOAM/OpenFOAM-v1812-cleanup.patch deleted file mode 100644 index bab3303dcade73980fbac604b47a0f4cb55aedc3..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenFOAM/OpenFOAM-v1812-cleanup.patch +++ /dev/null @@ -1,426 +0,0 @@ -# - Replace the OpenFOAM third-party libraries with EASYBUILD variables. 
-# - Set install dir, compiler, etc using EASYBUILD -# -# authors: Ward Poelmans <wpoely86@gmail.com>, Kenneth Hoste (HPC-UGent), Mark Olesen <Mark.Olesen@esi-group.com>, -# Simon Branford (University of Birmingham) ---- OpenFOAM-v1912/etc/bashrc.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/bashrc 2020-01-06 19:13:09.39069861 +0000 -@@ -94,7 +94,7 @@ - # = SYSTEMOPENMPI | OPENMPI | SYSTEMMPI | MPI | MPICH | MPICH-GM | - # HPMPI | CRAY-MPICH | FJMPI | QSMPI | SGIMPI | INTELMPI | USERMPI - # Also possible to use INTELMPI-xyz etc and define your own wmake rule --export WM_MPLIB=SYSTEMOPENMPI -+export WM_MPLIB=EASYBUILDMPI - - - #------------------------------------------------------------------------------ ---- OpenFOAM-v1912/etc/config.sh/CGAL.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/CGAL 2020-01-06 19:13:58.46916514 +0000 -@@ -41,33 +41,10 @@ - #------------------------------------------------------------------------------ - # USER EDITABLE PART: Changes made here may be lost with the next upgrade - --boost_version=boost_1_64_0 --cgal_version=CGAL-4.9.1 -- --export BOOST_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$boost_version --export CGAL_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$cgal_version -+##EasyBuild## settings -+export BOOST_ARCH_PATH="$EBROOTBOOST" -+export CGAL_ARCH_PATH="$EBROOTCGAL" - - # END OF (NORMAL) USER EDITABLE PART - #------------------------------------------------------------------------------ --if [ -n "$FOAM_VERBOSE" ] && [ -n "$PS1" ] --then -- echo "Using boost ($boost_version) -> $BOOST_ARCH_PATH" 1>&2 -- echo "Using CGAL ($cgal_version) -> $CGAL_ARCH_PATH" 1>&2 --fi -- --if command -v _foamAddLibAuto > /dev/null 2>&1 # Normal sourcing (not makeCGAL) --then -- _foamAddLibAuto $BOOST_ARCH_PATH lib$WM_COMPILER_LIB_ARCH -- _foamAddLibAuto $CGAL_ARCH_PATH lib$WM_COMPILER_LIB_ARCH -- -- # GMP/MPFR may have already been added with ThirdParty compiler, but cannot -- # be certain so add here. Any duplicates will be removed later. 
-- -- _foamAddLibAuto $GMP_ARCH_PATH # No fallback libdir -- _foamAddLibAuto $MPFR_ARCH_PATH # No fallback libdir - -- unset boost_version cgal_version -- --fi -- --#------------------------------------------------------------------------------ ---- OpenFOAM-v1912/etc/config.sh/FFTW.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/FFTW 2020-01-06 19:14:18.89030368 +0000 -@@ -33,23 +33,8 @@ - #------------------------------------------------------------------------------ - # USER EDITABLE PART: Changes made here may be lost with the next upgrade - --fftw_version=fftw-3.3.7 --export FFTW_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$fftw_version -+##EasyBuild## settings -+export FFTW_ARCH_PATH="$EBROOTFFTW" - - # END OF (NORMAL) USER EDITABLE PART - #------------------------------------------------------------------------------ --if [ -n "$FOAM_VERBOSE" ] && [ -n "$PS1" ] --then -- echo "Using fftw ($fftw_version) -> $FFTW_ARCH_PATH" 1>&2 --fi -- --if command -v _foamAddLibAuto > /dev/null 2>&1 # Normal sourcing (not makeFFTW) --then -- -- _foamAddLibAuto $FFTW_ARCH_PATH lib$WM_COMPILER_LIB_ARCH -- -- unset fftw_version -- --fi -- --#------------------------------------------------------------------------------ ---- OpenFOAM-v1912/etc/config.sh/gperftools.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/gperftools 2020-01-06 19:14:52.90383440 +0000 -@@ -35,32 +35,8 @@ - #------------------------------------------------------------------------------ - # USER EDITABLE PART: Changes made here may be lost with the next upgrade - --gperftools_version=gperftools-2.5 --GPERFTOOLS_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$gperftools_version -+##EasyBuild## settings -+GPERFTOOLS_ARCH_PATH="$EBROOTGPERFTOOLS" - - # END OF (NORMAL) USER EDITABLE PART - #------------------------------------------------------------------------------ --if [ -n "$FOAM_VERBOSE" ] && [ -n "$PS1" ] --then -- echo "Using gperftools ($gperftools_version) -> $GPERFTOOLS_ARCH_PATH" 1>&2 --fi -- --if command -v _foamAddLib > /dev/null 2>&1 # Normal sourcing --then -- -- # If GPERFTOOLS_ARCH_PATH does not end with '-system' or '-none', -- # it is located within ThirdParty, or a central installation -- # outside of ThirdParty and must be added to the lib-path. 
-- -- ending="${GPERFTOOLS_ARCH_PATH##*-}" -- if [ "$ending" != none ] && [ "$ending" != system ] -- then -- _foamAddLib $GPERFTOOLS_ARCH_PATH/lib$WM_COMPILER_LIB_ARCH -- _foamAddPath $GPERFTOOLS_ARCH_PATH/bin -- fi -- -- unset gperftools_version ending -- --fi -- --#------------------------------------------------------------------------------ ---- OpenFOAM-v1912/etc/config.sh/metis.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/metis 2020-01-06 19:14:21.70845195 +0000 -@@ -34,8 +34,9 @@ - #------------------------------------------------------------------------------ - # USER EDITABLE PART: Changes made here may be lost with the next upgrade - --METIS_VERSION=metis-5.1.0 --export METIS_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION/$METIS_VERSION -+##Easybuild## settings -+METIS_VERSION="metis-$EBVERSIONMETIS" -+[ -d "$METIS_ARCH_PATH" ] || METIS_ARCH_PATH="$METIS_ROOT" - - # END OF (NORMAL) USER EDITABLE PART - #------------------------------------------------------------------------------ ---- OpenFOAM-v1912/etc/config.sh/mpi.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/mpi 2020-01-06 19:12:41.54940690 +0000 -@@ -96,6 +96,10 @@ - _foamEtc -config mpi-user # <- Adjustments (optional) - ;; - -+EASYBUILDMPI) -+ export FOAM_MPI=mpi -+ ;; -+ - SYSTEMMPI) - export FOAM_MPI=mpi-system - _foamEtc -config mpi-system # <- Adjustments (optional) ---- OpenFOAM-v1912/etc/config.sh/paraview.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/paraview 2020-01-06 19:12:41.66878632 +0000 -@@ -51,7 +51,8 @@ - #------------------------------------------------------------------------------ - # USER EDITABLE PART: Changes made here may be lost with the next upgrade - --ParaView_VERSION=5.6.0 -+##Easybuild## settings -+ParaView_VERSION="$EBVERSIONPARAVIEW" - ParaView_QT=qt-system - cmake_version=cmake-system - -@@ -82,14 +83,6 @@ - esac - done - --# ThirdParty cmake --cmake="$archDir/$cmake_version" --if [ -r "$cmake/bin/cmake" ] --then -- # _foamAddPath not available when foamPV function is used -- PATH="$cmake/bin:$PATH" --fi -- - case "$ParaView_VERSION" in - ('') - # empty - do nothing -@@ -105,6 +98,7 @@ - ;; - - (system) -+ unset PV_PLUGIN_PATH - eval "$($WM_PROJECT_DIR/bin/foamEtcFile -sh ${FOAM_CONFIG_NOUSER:+-mode=o} -config paraview-system)" - ;; - -@@ -125,7 +119,9 @@ - ;; - esac - -- export ParaView_DIR="$archDir/ParaView-$ParaView_VERSION" -+ ##Easybuild## settings -+ export ParaView_DIR="$EBROOTPARAVIEW" -+ unset PV_PLUGIN_PATH - - # Set paths if binaries are present - if [ -r "$ParaView_DIR" ] -@@ -133,48 +129,26 @@ - # The OpenFOAM plugin directory must be the first in PV_PLUGIN_PATH - # and have paraview-major.minor encoded in its name - export PV_PLUGIN_PATH="$FOAM_LIBBIN/paraview-$pv_api" -- - export PATH="$ParaView_DIR/bin:$PATH" -- pvLibDir="$ParaView_DIR/lib/paraview-$pv_api" -- #OBSOLETE? pvPython=$ParaView_DIR/Utilities/VTKPythonWrapping -- -- #OBSOLETE? # Python libraries as required -- #OBSOLETE? if [ -r $pvPython ] -- #OBSOLETE? then -- #OBSOLETE? export PYTHONPATH=$PYTHONPATH:${PYTHONPATH:+:}$pvPython:$pvLibDir -- #OBSOLETE? fi -- -- # QT libraries as required, and Qt5_DIR for the root directory. 
-- # Another possibility: "qtpaths --qt-version" -- qtDir="$archDir/$ParaView_QT" -- if [ -d "$qtDir" ] -- then -- case "$ParaView_QT" in -- (*-5*) -- export Qt5_DIR="$qtDir" -- ;; -- esac -- for qtLibDir in "$qtDir/lib$WM_COMPILER_LIB_ARCH" "$qtDir/lib" -- do -- if [ -d "$qtLibDir" ] -- then -- export LD_LIBRARY_PATH="$qtLibDir:$LD_LIBRARY_PATH" -- break -- fi -- done -- fi - - # ParaView libraries - # - 5.4 and earlier: lib/paraview-X.X -- # - 5.5 and later: lib/ -- case "$pv_api" in -- (5.[0-4]*) -- export LD_LIBRARY_PATH="$pvLibDir:$LD_LIBRARY_PATH" -- ;; -- (*) -- export LD_LIBRARY_PATH="$ParaView_DIR/lib:$LD_LIBRARY_PATH" -- ;; -- esac -+ # - 5.5 and later: lib/, but could also be lib64/ -+ for libDir in "lib$WM_COMPILER_LIB_ARCH" "lib" -+ do -+ pvLibDir="$libDir/paraview-$pv_api" -+ if [ -d "$ParaView_DIR/$pvLibDir" ] -+ then -+ case "$pv_api" in -+ (5.[0-4]*) -+ libDir="$pvLibDir" # Needs lib/paraview-X.X (not lib) -+ ;; -+ esac -+ export LD_LIBRARY_PATH="$ParaView_DIR/$libDir:$LD_LIBRARY_PATH" -+ break -+ fi -+ unset pvLibDir -+ done - - if [ -n "$FOAM_VERBOSE" ] && [ -n "$PS1" ] - then -@@ -198,13 +172,13 @@ - - #------------------------------------------------------------------------------ - --if command -v _foamAddLib > /dev/null 2>&1 # normal sourcing -+if command -v _foamAddLib >/dev/null # normal sourcing - then - unset ParaView_VERSION ParaView_QT - fi - --unset archDir -+unset archDir libDir - unset cmake cmake_version --unset pv_api pvLibDir pvPython qtDir qtLibDir -+unset pv_api pvLibDir pvPython qtDir - - #------------------------------------------------------------------------------ ---- OpenFOAM-v1912/etc/config.sh/scotch.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/scotch 2020-01-06 19:14:50.81742938 +0000 -@@ -31,8 +31,10 @@ - #------------------------------------------------------------------------------ - # USER EDITABLE PART: Changes made here may be lost with the next upgrade - --SCOTCH_VERSION=scotch_6.0.6 --export SCOTCH_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION/$SCOTCH_VERSION -+##Easybuild## settings -+export SCOTCH_VERSION="scotch_$EBVERSIONSCOTCH" -+export SCOTCH_ARCH_PATH="$EBROOTSCOTCH" -+[ -d "$SCOTCH_ARCH_PATH" ] || SCOTCH_ARCH_PATH="$SCOTCH_ROOT" - - # END OF (NORMAL) USER EDITABLE PART - #------------------------------------------------------------------------------ ---- OpenFOAM-v1912/etc/config.sh/vtk.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/etc/config.sh/vtk 2020-01-06 19:13:23.08425391 +0000 -@@ -31,46 +31,9 @@ - #------------------------------------------------------------------------------ - # USER EDITABLE PART: Changes made here may be lost with the next upgrade - --vtk_version=VTK-8.2.0 --mesa_version=mesa-17.1.1 --mesa_llvm=none -- --export VTK_DIR="$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$vtk_version" --export MESA_ARCH_PATH="$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$mesa_version" --export LLVM_ARCH_PATH="$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH/$mesa_llvm" -+##Easybuild## settings -+export VTK_DIR="$EBROOTVTK" -+export MESA_ARCH_PATH="$EBROOTMESA" - - # END OF (NORMAL) USER EDITABLE PART - #------------------------------------------------------------------------------ --if [ -n "$FOAM_VERBOSE" ] && [ -n "$PS1" ] --then -- echo "Using vtk" 1>&2 -- echo " vtk ($vtk_version) $VTK_DIR" 1>&2 -- echo " mesa ($mesa_version) $MESA_ARCH_PATH" 1>&2 -- echo " llvm ($mesa_llvm) $LLVM_ARCH_PATH" 1>&2 --fi -- --if command -v 
_foamAddLib > /dev/null 2>&1 # normal sourcing --then -- -- # Set paths if binaries are present -- if [ -d "$VTK_DIR" ] -- then -- # Uses lib/ and not lib64/ -- _foamAddLib "$VTK_DIR/lib" -- else -- unset VTK_DIR -- fi -- -- if [ -d "$MESA_ARCH_PATH" ] -- then -- _foamAddLibAuto "$LLVM_ARCH_PATH" -- _foamAddLibAuto "$MESA_ARCH_PATH" -- else -- unset LLVM_ARCH_PATH -- unset MESA_ARCH_PATH -- fi -- -- unset vtk_version mesa_version mesa_llvm --fi -- --#------------------------------------------------------------------------------ ---- OpenFOAM-v1912/wmake/scripts/have_readline.orig 2020-01-06 19:07:23.000000000 +0000 -+++ OpenFOAM-v1912/wmake/scripts/have_readline 2020-01-06 19:15:54.77256611 +0000 -@@ -52,47 +52,52 @@ - # -> HAVE_LIBREADLINE, READLINE_INC_DIR, READLINE_LIB_DIR - have_readline() - { -- local header library static settings warn -+ local prefix header library incName libName settings warn - # warn="==> skip readline" - -+ # Expected location, include/library names -+ prefix="$EBROOTLIBREADLINE" -+ incName="readline/readline.h" -+ libName="libreadline" -+ -+ # ---------------------------------- -+ if isNone "$prefix" -+ then -+ [ -n "$warn" ] && echo "$warn (disabled)" -+ return 1 -+ elif hasAbsdir "$prefix" -+ then -+ header=$(findFirstFile "$prefix/include/$incName") -+ library="$(findExtLib $libName)" -+ elif isSystem "$prefix" -+ then -+ header=$(findFirstFile "/usr/local/include/$incName" "/usr/include/$incName") -+ prefix=$(sysPrefix "$header") -+ else -+ unset prefix -+ fi -+ # ---------------------------------- - -- # Header/library names -- header="readline/readline.h" -- library="libreadline$extLibso" -- -- # System only? -- header=$(findFirstFile /usr/local/include/$header /usr/include/$header) -- -- case "$header" in -- /usr/local/*) -- library=$(findFirstFile \ -- /usr/local/lib/$library \ -- /usr/local/lib$WM_COMPILER_LIB_ARCH/$library \ -- ) -- ;; -- -- *) -- library=$(findFirstFile \ -- /usr/lib/$library \ -- /usr/lib$WM_COMPILER_LIB_ARCH/$library \ -- ) -- ;; -- esac -- -- # Header found? -+ # Header - [ -n "$header" ] || { - [ -n "$warn" ] && echo "$warn (no header)" - return 2 - } - -- # Library found? -- [ -n "$library" ] || { -- [ -n "$warn" ] && echo "$warn (missing library)" -+ # Library -+ [ -n "$library" ] || library=$(findLibrary \ -+ "$prefix/lib/$libName" \ -+ "$prefix/lib$WM_COMPILER_LIB_ARCH/$libName" \ -+ ) || { -+ [ -n "$warn" ] && echo "$warn (no library)" - return 2 - } - -- header="${header%/*}" # Strip one-level (include/readline/...) -+ # ---------------------------------- -+ -+ header="${header%/*}" # Strip one-level (include/readline/...) - -+ # OK - export HAVE_LIBREADLINE=true - export READLINE_INC_DIR="${header%/*}" # Basename - export READLINE_LIB_DIR="${library%/*}" # Basename diff --git a/Golden_Repo/o/OpenFOAM/OpenFOAM-v1812-gpsmpi-2020.eb b/Golden_Repo/o/OpenFOAM/OpenFOAM-v1812-gpsmpi-2020.eb deleted file mode 100644 index bcf625e68d05b3d07e3d5173fa7405c0b172379e..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenFOAM/OpenFOAM-v1812-gpsmpi-2020.eb +++ /dev/null @@ -1,68 +0,0 @@ -name = 'OpenFOAM' -version = 'v1812' -versionsuffix = '_200312' - -homepage = 'https://www.openfoam.com/' -description = """OpenFOAM is a free, open source CFD software package. - OpenFOAM has an extensive range of features to solve anything from complex fluid flows - involving chemical reactions, turbulence and heat transfer, - to solid dynamics and electromagnetics. 
- -OpenFOAM %(version)s%(versionsuffix)s is installed in -$EBROOTOPENFOAM -and the corresponding module -OpenFOAM/%(version)s%(versionsuffix)s -provides settings to run its solvers and utilities. - -The full OpenFOAM environment can be initiated via -. $FOAM_BASH -""" - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'cstd': 'c++17'} - -source_urls = ['https://sourceforge.net/projects/openfoam/files/%(version)s/'] -sources = ['%(name)s-%(version)s%(versionsuffix)s.tgz', 'ThirdParty-v1812.tgz'] -patches = [ - 'OpenFOAM-%(version)s-cleanup.patch', - ('ThirdParty-%(version)s-cleanup.patch', ".."), - # 'OpenFOAM-v1906-CGAL-4.14.1.patch', -] -checksums = [ - '925d2877c12740fab177a30fdcaa8899c262c15b90225f9c29d18a2d97532de0', # OpenFOAM-v1812_200312.tgz - 'f90a66d193f238528543bb4e007b43c90f246351aa9a7ea15ea8e7fe3b410498', # ThirdParty-v1812.tgz - '632c0888d476171823fbec00dd85c951897a49e86c6d44b7ae6fd010ddb4b066', # OpenFOAM-v1812-cleanup.patch - 'd37f3451d5716f1b989f999f46ed74a3e625ccbac78dfbc196203498e941b172', # ThirdParty-v1812-cleanup.patch - 'b986a573775fe108755790ef34529f2afd428be7c4c295fadb58fbe5ea058920', # OpenFOAM-v1906-CGAL-4.14.1.patch -] - -dependencies = [ - ('libreadline', '8.0'), - ('ncurses', '6.2'), - # OpenFOAM requires 64 bit METIS using 32 bit indexes (array indexes) - ('METIS', '5.1.0'), - ('SCOTCH', '6.1.0'), - ('CGAL', '4.13.1', '-Python-3.8.5'), - ('FFTW', '3.3.8'), - # ('ParaView', '5.6.0', '-Python-3.6.8'), -] - -builddependencies = [ - ('Bison', '3.6.4'), - ('CMake', '3.18.0'), - ('flex', '2.6.4'), -] - -parallel = 8 - -prebuildopts = 'export FOAMY_HEX_MESH=1 &&' - -modextravars = { - 'MPI_BUFFER_SIZE': '40000000' -} - -modloadmsg = "This module provides only a minimal environment to run %(name)s executables.\n" -modloadmsg += "The full environment can be initiated via:\n" -modloadmsg += ". $FOAM_BASH\n" - -moduleclass = 'cae' diff --git a/Golden_Repo/o/OpenFOAM/ThirdParty-v1812-cleanup.patch b/Golden_Repo/o/OpenFOAM/ThirdParty-v1812-cleanup.patch deleted file mode 100644 index 45149ebc6d79d628de7da4e4bf5e5043da4f9424..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenFOAM/ThirdParty-v1812-cleanup.patch +++ /dev/null @@ -1,256 +0,0 @@ ---- ThirdParty-v1812/Allwmake.orig 2020-08-19 14:45:23.238743000 +0200 -+++ ThirdParty-v1812/Allwmake 2020-08-31 12:19:28.638081685 +0200 -@@ -73,172 +73,6 @@ - - #------------------------------------------------------------------------------ - # Building scotch on different platforms is still a bit of a pain -- --# Get SCOTCH_VERSION, SCOTCH_ARCH_PATH --if settings=$($WM_PROJECT_DIR/bin/foamEtcFile config.sh/scotch) --then -- . $settings --else -- echo -- echo "Error: no config.sh/scotch settings" -- echo --fi -- --echo --echo ======================================== --echo "Scotch decomposition ($SCOTCH_VERSION)" --echo " $SCOTCH_ARCH_PATH" -- --SCOTCH_SOURCE_DIR=$sourceBASE/$SCOTCH_VERSION -- --# Needs generalizing, but works fairly well --scotchMakefile=etc/makeFiles/scotch/Makefile.inc.OpenFOAM-"$(uname -s)".shlib -- --[ -f $scotchMakefile ] || \ -- scotchMakefile=${scotchMakefile%/*}/Makefile.inc.OpenFOAM-Linux.shlib -- --if [ -f $SCOTCH_ARCH_PATH/include/scotch.h \ -- -a -r $FOAM_EXT_LIBBIN/libscotch.$SO \ -- -a -r $FOAM_EXT_LIBBIN/libscotcherrexit.$SO ] --then -- echo " scotch include: $SCOTCH_ARCH_PATH/include" -- echo " scotch library: $FOAM_EXT_LIBBIN" --elif [ -d "$SCOTCH_SOURCE_DIR" ] --then --( -- # Older versions ok, but scotch-6.0.5a cannot build in parallel. 
-- # Force serial build -- export WM_NCOMPPROCS=1 -- echo "*** building scotch in serial ***" -- echo -- -- cd $SCOTCH_SOURCE_DIR/src || exit 1 -- export GIT_DIR=$SCOTCH_SOURCE_DIR/.git # Mask seeing our own git-repo -- rm -rf $SCOTCH_ARCH_PATH -- -- applyPatch $SCOTCH_VERSION .. # patch at parent-level -- -- prefixDIR=$SCOTCH_ARCH_PATH -- incDIR=$SCOTCH_ARCH_PATH/include -- libDIR=$FOAM_EXT_LIBBIN -- -- mkdir -p $prefixDIR 2>/dev/null -- mkdir -p $incDIR 2>/dev/null -- mkdir -p $libDIR 2>/dev/null -- -- # Makefile relative to this directory -- scotchMakefile=../../$scotchMakefile -- -- if [ -f $scotchMakefile ] -- then -- rm -f Makefile.inc -- ln -s $scotchMakefile Makefile.inc -- fi -- [ -f Makefile.inc ] || { -- echo " Error: scotch needs an appropriate Makefile.inc" -- exit 1 -- } -- -- export CCS="${CC:-$WM_CC}" # CCS (serial compiler) default=$(CC) -- export CCP=$(whichMpicc) # CCP (parallel compiler) default=mpicc -- -- # Consistency for Intel-MPI and non-icc compilers -- [ -n "$I_MPI_CC" ] || export I_MPI_CC="${CC:-$WM_CC}" -- -- make realclean 2>/dev/null # Extra safety -- make -j $WM_NCOMPPROCS scotch \ -- && make \ -- prefix=$prefixDIR \ -- includedir=$incDIR \ -- libdir=$libDIR \ -- install -- -- make realclean 2>/dev/null || true # Failed cleanup is uncritical --) || warnBuildIssues SCOTCH --else -- warnNotFound SCOTCH --fi -- --# Build ptscotch if MPI (ThirdParty or system) is available --# and normal scotch was built (has include and library) --if [ "${FOAM_MPI:-dummy}" != dummy ] && \ -- [ -f $SCOTCH_ARCH_PATH/include/scotch.h \ -- -a -r $FOAM_EXT_LIBBIN/libscotch.$SO ] || \ --{ -- # Report that the above tests failed and pass-through the failure -- echo -- echo " WARNING: skipping pt-scotch - no mpi or <scotch.h> not found" -- false --} --then -- echo -- echo ======================================== -- echo "pt-scotch decomposition ($SCOTCH_VERSION with $FOAM_MPI)" -- echo " $SCOTCH_ARCH_PATH" -- -- if [ -f $SCOTCH_ARCH_PATH/include/$FOAM_MPI/ptscotch.h \ -- -a -r $FOAM_EXT_LIBBIN/$FOAM_MPI/libptscotch.$SO \ -- -a -r $FOAM_EXT_LIBBIN/$FOAM_MPI/libptscotcherrexit.$SO ] -- then -- echo " ptscotch include: $SCOTCH_ARCH_PATH/include/$FOAM_MPI" -- echo " ptscotch library: $FOAM_EXT_LIBBIN/$FOAM_MPI" -- else -- ( -- # Older versions ok, but scotch-6.0.5a cannot build in parallel. 
-- # Force serial build -- export WM_NCOMPPROCS=1 -- echo "*** building pt-scotch in serial ***" -- -- cd $SCOTCH_SOURCE_DIR/src || exit 1 -- export GIT_DIR=$SCOTCH_SOURCE_DIR/.git # Mask seeing our own git-repo -- echo -- -- prefixDIR=$SCOTCH_ARCH_PATH -- incDIR=$SCOTCH_ARCH_PATH/include/$FOAM_MPI -- libDIR=$FOAM_EXT_LIBBIN/$FOAM_MPI -- -- mkdir -p $prefixDIR 2>/dev/null -- mkdir -p $incDIR 2>/dev/null -- mkdir -p $libDIR 2>/dev/null -- -- if [ -f $scotchMakefile ] -- then -- rm -f Makefile.inc -- ln -s $scotchMakefile Makefile.inc -- fi -- [ -f Makefile.inc ] || { -- echo " Error: ptscotch needs an appropriate Makefile.inc" -- exit 1 -- } -- -- export CCS="${CC:-$WM_CC}" # CCS (serial compiler) default=$(CC) -- export CCP=$(whichMpicc) # CCP (parallel compiler) default=mpicc -- -- # Consistency for Intel-MPI and non-icc compilers -- [ -n "$I_MPI_CC" ] || export I_MPI_CC="${CC:-$WM_CC}" -- -- make realclean 2>/dev/null # Extra safety -- make -j $WM_NCOMPPROCS ptscotch \ -- && make \ -- prefix=$prefixDIR \ -- includedir=$incDIR \ -- libdir=$libDIR \ -- install -- -- make realclean 2>/dev/null || true # Failed cleanup is uncritical -- ) || warnBuildIssues PTSCOTCH -- fi -- -- # Verify existence of ptscotch include -- [ -f $SCOTCH_ARCH_PATH/include/$FOAM_MPI/ptscotch.h ] || { -- echo -- echo " WARNING: required include file 'ptscotch.h' not found!" -- } --fi -- --# Could now remove $SCOTCH_SOURCE_DIR/src/Makefile.inc -- -- - #------------------------------------------------------------------------------ - - echo -@@ -258,45 +92,44 @@ - fi - - #------------------------------------------------------------------------------ --echo --echo ======================================== -- -+#echo -+#echo ======================================== - # Get METIS_ARCH_PATH --if settings=$($WM_PROJECT_DIR/bin/foamEtcFile config.sh/metis) --then -- . $settings --fi --if [ -n "$METIS_ARCH_PATH" ] --then -- echo Metis decomposition -- ./makeMETIS -test "$METIS_ARCH_PATH" || \ -- ./makeMETIS || warnBuildIssues METIS --else -- warnNotFound METIS # METIS is optional --fi -+#if settings=$($WM_PROJECT_DIR/bin/foamEtcFile config.sh/metis) -+#then -+# . 
$settings -+#fi -+#if [ -n "$METIS_ARCH_PATH" ] -+#then -+# echo Metis decomposition -+# ./makeMETIS -test "$METIS_ARCH_PATH" || \ -+# ./makeMETIS || warnBuildIssues METIS -+#else -+# warnNotFound METIS # METIS is optional -+#fi - - #------------------------------------------------------------------------------ --echo --echo ======================================== --if [ -n "$CGAL_ARCH_PATH" ] --then -- echo "CGAL/boost" -- ./makeCGAL -test "$CGAL_ARCH_PATH" "$BOOST_ARCH_PATH" || \ -- ./makeCGAL || warnBuildIssues CGAL --else -- warnNotFound CGAL # CGAL is optional --fi -- --echo --echo ======================================== --if [ -n "$FFTW_ARCH_PATH" ] --then -- echo FFTW -- ./makeFFTW -test "$FFTW_ARCH_PATH" || \ -- ./makeFFTW || warnBuildIssues FFTW --else -- warnNotFound FFTW # FFTW is optional --fi -+#echo -+#echo ======================================== -+#if [ -n "$CGAL_ARCH_PATH" ] -+#then -+# echo "CGAL/boost" -+# ./makeCGAL -test "$CGAL_ARCH_PATH" "$BOOST_ARCH_PATH" || \ -+# ./makeCGAL || warnBuildIssues CGAL -+#else -+# warnNotFound CGAL # CGAL is optional -+#fi -+ -+#echo -+#echo ======================================== -+#if [ -n "$FFTW_ARCH_PATH" ] -+#then -+# echo FFTW -+# ./makeFFTW -test "$FFTW_ARCH_PATH" || \ -+# ./makeFFTW || warnBuildIssues FFTW -+#else -+# warnNotFound FFTW # FFTW is optional -+#fi - - echo - echo ======================================== diff --git a/Golden_Repo/o/OpenGL/OpenGL-2020-GCCcore-10.3.0.eb b/Golden_Repo/o/OpenGL/OpenGL-2020-GCCcore-10.3.0.eb deleted file mode 100644 index 296e7be38203535bf80d728ec1a81633faf23c8f..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenGL/OpenGL-2020-GCCcore-10.3.0.eb +++ /dev/null @@ -1,225 +0,0 @@ -easyblock = 'Bundle' - -name = 'OpenGL' -version = '2020' - -homepage = 'http://www.opengl.org/' -description = """ -Open Graphics Library (OpenGL) is a cross-language, cross-platform application programming interface (API) for rendering -2D and 3D vector graphics. Mesa is an open-source implementation of the OpenGL specification - a system for rendering -interactive 3D graphics. NVIDIA supports OpenGL and a complete set of OpenGL extensions, designed to give a maximum -performance on NVIDIA GPUs. - -This is a GL vendor neutral dispatch (GLVND) installation with Mesa and NVIDIA in the same lib-directory. Mesa or NVIDIA -OpenGL is set individually for each XScreen. -""" - -site_contacts = 'Jens Henrik Goebbert <j.goebbert@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -# swr detects and builds parts specific for AVX and AVX2. If we use -# -xHost, this always gets overwritten and will fail. 
-toolchainopts = {'optarch': False} - -builddependencies = [ - ('Python', '3.8.5'), - ('binutils', '2.36.1'), - ('flex', '2.6.4'), - ('Bison', '3.7.6'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), - ('expat', '2.2.9'), - ('libxml2', '2.9.10'), - ('Meson', '0.55.0', '-Python-%(pyver)s'), - ('Ninja', '1.10.0'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('nettle', '3.6'), - ('libdrm', '2.4.106'), - ('LLVM', '10.0.1'), - ('X11', '20200222'), - ('libunwind', '1.4.0'), - ('nvidia-driver', 'default', '', SYSTEM), -] - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCE_TAR_GZ], - 'start_dir': '%(name)s-%(version)s', -} - -local_pkg_config = ('export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib/pkgconfig && ' - 'export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib64/pkgconfig && ') - -components = [ - # A vendor neutral dispatch layer - ('libglvnd', '1.3.2', { - 'source_urls': [ - 'https://gitlab.freedesktop.org/glvnd/libglvnd/-/archive/v%(version)s/' - ], - 'sources': ['%(name)s-v%(version)s.tar.gz'], - 'start_dir': '%(name)s-v%(version)s', - 'checksums': ['6332c27f4c792b09a3eb1d7ae18c2d6ff6a0acaf3a746489caf859e659fca2f7'], - 'preconfigopts': './autogen.sh && ' - }), - # Mesa for software rendering, not hardware rendering. - ('Mesa', '20.1.4', { - # We build: - # - llvmpipe: the high-performance Gallium LLVM driver (only possible with glx=gallium-xlib) - # - swr: Intel's OpenSWR - 'easyblock': 'MesonNinja', - 'source_urls': [ - 'https://mesa.freedesktop.org/archive/', - 'https://mesa.freedesktop.org/archive/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/older-versions/%(version_major)s.x/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/older-versions/%(version_major)s.x', - ], - 'sources': [SOURCELOWER_TAR_XZ], - 'checksums': [ - '6800271c2be2a0447510eb4e9b67edd9521859a4d565310617c4b359eb6799fe', - ], - 'start_dir': '%(namelower)s-%(version)s', - 'separate_build_dir': True, - 'preconfigopts': local_pkg_config, - 'configopts': (' -D libdir=%(installdir)s/lib' - ' -D gbm=true' - ' -D glx=auto' - ' -D osmesa=gallium' - ' -D llvm=true' - ' -D shared-llvm=true' - ' -D dri-drivers=""' - ' -D gallium-drivers="swr,swrast"' - ' -D swr-arches=avx,avx2,skx,knl' - ' -D vulkan-drivers=""' - ' -D platforms=x11,drm,surfaceless' - ' -D glvnd=true' - ' -D libunwind=true' - ' -D egl=true' - ' -D gles1=true -Dgles2=true' - ' -D shared-glapi=true' - ' -D gallium-omx=disabled' - ' -D gallium-media=omx,va,vdpau,xvmc' - ' -D buildtype=release'), - }), - # OpenGL Utility Library - offers higher level GL-graphics functions - ('glu', '9.0.1', { - 'preconfigopts': local_pkg_config, - 'source_urls': [ - 'ftp://ftp.freedesktop.org/pub/mesa/glu/' - ], - 'sources': [ - 'glu-%(version)s.tar.gz' - ], - 'checksums': [ - 'f6f484cfcd51e489afe88031afdea1e173aa652697e4c19ddbcb8260579a10f7', - ], - }), - # OpenGL Extension Wrangler Library - determines which OpenGL extensions are supported at run-time - # This is just GLEW for GLX (which requires DISPLAY to be set) and not GLEW for EGL as GLEW selects GLX/EGL at - # compile-time and not run-time (https://github.com/nigels-com/glew/issues/172#issuecomment-357400019) - # Compile+Load GLEW-EGL on top to enable GLEW for EGL - ('glew', '2.2.0', { - 'source_urls': [ - 'https://sourceforge.net/projects/glew/files/glew/snapshots/', - ], - 'sources': [ - 'glew-20200115.tgz', - ], - 'checksums': [ - 
'314219ba1db50d49b99705e8eb00e83b230ee7e2135289a00b5b570e4a4db43a', - ], - 'skipsteps': ['configure'], - 'buildopts': ('GLEW_PREFIX=%(installdir)s GLEW_DEST=%(installdir)s LIBDIR=%(installdir)s/lib ' - 'LDFLAGS.EXTRA="-L${EBROOTX11}/lib/ -lX11" LDFLAGS.GL="-L%(installdir)s/lib -lGL"'), - 'installopts': 'GLEW_PREFIX=%(installdir)s GLEW_DEST=%(installdir)s LIBDIR=%(installdir)s/lib ', - 'install_cmd': 'make install.all ', - }), - # MESA demos - offers the important command 'eglinfo' - ('demos', '95c1a57cfdd1ef2852c828cba4659a72575c5c5d', { - 'source_urls': [ - 'https://gitlab.freedesktop.org/mesa/demos/-/archive/%(version)s/', - ], - 'sources': [SOURCELOWER_TAR_GZ], - 'checksums': [ - '7738beca8f6f6981ba04c8a22fde24d69d6b2aaab1758ac695c9475bf704249c', - ], - 'preconfigopts': ('./autogen.sh && ' + - local_pkg_config + - 'GLEW_CFLAGS="-I%(installdir)s/include/" GLEW_LIBS="-L%(installdir)s/lib/ -lGLEW -lGL" ' - 'EGL_CFLAGS="-I%(installdir)s/include/" EGL_LIBS="-L%(installdir)s/lib/ -lEGL" '), - 'configopts': '--disable-osmesa ', - }), -] - -postinstallcmds = [ - 'cd %(installdir)s/lib && ln -sf libGL.so.1.7.0 libGL.so.1', - 'rm %(installdir)s/lib/*.la', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libEGL_nvidia.so.0 .', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libGLX_nvidia.so.0 .', - 'cd %(installdir)s/lib && ln -sf libGLX_mesa.so.0 libGLX_indirect.so.0', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libGLESv1_CM_nvidia.so.1 .', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libGLESv2_nvidia.so.2 .', - # EGL vendor ICDs - ( - '{ cat > %(installdir)s/share/glvnd/egl_vendor.d/10_nvidia.json; } << \'EOF\'\n' - '{\n' - ' \"file_format_version\" : \"1.0.0\",\n' - ' \"ICD\" : {\n' - ' \"library_path\" : \"libEGL_nvidia.so.0\"\n' - ' }\n' - '}\n' - 'EOF' - ), - ( - '{ cat > %(installdir)s/share/glvnd/egl_vendor.d/50_mesa.json; } << \'EOF\'\n' - '{\n' - ' \"file_format_version\" : \"1.0.0\",\n' - ' \"ICD\" : {\n' - ' \"library_path\" : \"libEGL_mesa.so.0\"\n' - ' }\n' - '}\n' - 'EOF' - ), - # correct pkg-config of GLEW - 'sed -i "/^libdir=/c\libdir=\${exec_prefix}\/lib" %(installdir)s/lib/pkgconfig/glew.pc', - 'sed -i "/^prefix=/c\prefix=%(installdir)s" %(installdir)s/lib/pkgconfig/glew.pc', -] - -modextravars = { - '__EGL_VENDOR_LIBRARY_FILENAMES': ('%(installdir)s/share/glvnd/egl_vendor.d/10_nvidia.json:' - '%(installdir)s/share/glvnd/egl_vendor.d/50_mesa.json'), - 'EGL_PLATFORM': 'surfaceless', - 'EGL_DRIVER': 'swr', - 'EGL_LOG_LEVEL': 'fatal', - 'GALLIUM_DRIVER': 'swr', - 'KNOB_MAX_WORKER_THREADS': '65535', -} - -modluafooter = ''' -add_property("arch","gpu") - -conflict("Mesa") -conflict("libGLU") -''' - -sanity_check_paths = { - 'files': [ - 'lib/libEGL_mesa.%s' % SHLIB_EXT, 'lib/libOSMesa.%s' % SHLIB_EXT, - 'lib/libGLESv1_CM.%s' % SHLIB_EXT, 'lib/libGLESv2.%s' % SHLIB_EXT, - 'include/GL/glext.h', 'include/GL/glx.h', - 'include/GL/osmesa.h', 'include/GL/gl.h', 'include/GL/glxext.h', - 'include/GLES/gl.h', 'include/GLES2/gl2.h', 'include/GLES3/gl3.h', - 'lib/libOpenGL.%s' % SHLIB_EXT, - 'lib/libGLEW.a', 'lib/libGLEW.%s' % SHLIB_EXT, - 'bin/glewinfo', 'bin/visualinfo', - 'include/GL/glew.h', 'include/GL/glxew.h', 'include/GL/wglew.h', - ], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OpenGL/OpenGL-2020-GCCcore-9.3.0.eb b/Golden_Repo/o/OpenGL/OpenGL-2020-GCCcore-9.3.0.eb deleted file mode 100644 index 721d44504c721e52e00b21c0be71f4ccfe8f9505..0000000000000000000000000000000000000000 --- 
a/Golden_Repo/o/OpenGL/OpenGL-2020-GCCcore-9.3.0.eb +++ /dev/null @@ -1,225 +0,0 @@ -easyblock = 'Bundle' - -name = 'OpenGL' -version = '2020' - -homepage = 'http://www.opengl.org/' -description = """ -Open Graphics Library (OpenGL) is a cross-language, cross-platform application programming interface (API) for rendering -2D and 3D vector graphics. Mesa is an open-source implementation of the OpenGL specification - a system for rendering -interactive 3D graphics. NVIDIA supports OpenGL and a complete set of OpenGL extensions, designed to give a maximum -performance on NVIDIA GPUs. - -This is a GL vendor neutral dispatch (GLVND) installation with Mesa and NVIDIA in the same lib-directory. Mesa or NVIDIA -OpenGL is set individually for each XScreen. -""" - -site_contacts = 'Jens Henrik Goebbert <j.goebbert@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -# swr detects and builds parts specific for AVX and AVX2. If we use -# -xHost, this always gets overwritten and will fail. -toolchainopts = {'optarch': False} - -builddependencies = [ - ('Python', '3.8.5'), - ('binutils', '2.34'), - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), - ('expat', '2.2.9'), - ('libxml2', '2.9.10'), - ('Meson', '0.55.0', '-Python-%(pyver)s'), - ('Ninja', '1.10.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('nettle', '3.6'), - ('libdrm', '2.4.99'), - ('LLVM', '10.0.1'), - ('X11', '20200222'), - ('libunwind', '1.4.0'), - ('nvidia-driver', 'default', '', SYSTEM), -] - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCE_TAR_GZ], - 'start_dir': '%(name)s-%(version)s', -} - -local_pkg_config = ('export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib/pkgconfig && ' - 'export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib64/pkgconfig && ') - -components = [ - # A vendor neutral dispatch layer - ('libglvnd', '1.3.2', { - 'source_urls': [ - 'https://gitlab.freedesktop.org/glvnd/libglvnd/-/archive/v%(version)s/' - ], - 'sources': ['%(name)s-v%(version)s.tar.gz'], - 'start_dir': '%(name)s-v%(version)s', - 'checksums': ['6332c27f4c792b09a3eb1d7ae18c2d6ff6a0acaf3a746489caf859e659fca2f7'], - 'preconfigopts': './autogen.sh && ' - }), - # Mesa for software rendering, not hardware rendering. 
- ('Mesa', '20.1.4', { - # We build: - # - llvmpipe: the high-performance Gallium LLVM driver (only possible with glx=gallium-xlib) - # - swr: Intel's OpenSWR - 'easyblock': 'MesonNinja', - 'source_urls': [ - 'https://mesa.freedesktop.org/archive/', - 'https://mesa.freedesktop.org/archive/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/older-versions/%(version_major)s.x/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/older-versions/%(version_major)s.x', - ], - 'sources': [SOURCELOWER_TAR_XZ], - 'checksums': [ - '6800271c2be2a0447510eb4e9b67edd9521859a4d565310617c4b359eb6799fe', - ], - 'start_dir': '%(namelower)s-%(version)s', - 'separate_build_dir': True, - 'preconfigopts': local_pkg_config, - 'configopts': (' -D libdir=%(installdir)s/lib' - ' -D gbm=true' - ' -D glx=auto' - ' -D osmesa=gallium' - ' -D llvm=true' - ' -D shared-llvm=true' - ' -D dri-drivers=""' - ' -D gallium-drivers="swr,swrast"' - ' -D swr-arches=avx,avx2,skx,knl' - ' -D vulkan-drivers=""' - ' -D platforms=x11,drm,surfaceless' - ' -D glvnd=true' - ' -D libunwind=true' - ' -D egl=true' - ' -D gles1=true -Dgles2=true' - ' -D shared-glapi=true' - ' -D gallium-omx=disabled' - ' -D gallium-media=omx,va,vdpau,xvmc' - ' -D buildtype=release'), - }), - # OpenGL Utility Library - offers higher level GL-graphics functions - ('glu', '9.0.1', { - 'preconfigopts': local_pkg_config, - 'source_urls': [ - 'ftp://ftp.freedesktop.org/pub/mesa/glu/' - ], - 'sources': [ - 'glu-%(version)s.tar.gz' - ], - 'checksums': [ - 'f6f484cfcd51e489afe88031afdea1e173aa652697e4c19ddbcb8260579a10f7', - ], - }), - # OpenGL Extension Wrangler Library - determines which OpenGL extensions are supported at run-time - # This is just GLEW for GLX (which requires DISPLAY to be set) and not GLEW for EGL as GLEW selects GLX/EGL at - # compile-time and not run-time (https://github.com/nigels-com/glew/issues/172#issuecomment-357400019) - # Compile+Load GLEW-EGL on top to enable GLEW for EGL - ('glew', '2.2.0', { - 'source_urls': [ - 'https://sourceforge.net/projects/glew/files/glew/snapshots/', - ], - 'sources': [ - 'glew-20200115.tgz', - ], - 'checksums': [ - '314219ba1db50d49b99705e8eb00e83b230ee7e2135289a00b5b570e4a4db43a', - ], - 'skipsteps': ['configure'], - 'buildopts': ('GLEW_PREFIX=%(installdir)s GLEW_DEST=%(installdir)s LIBDIR=%(installdir)s/lib ' - 'LDFLAGS.EXTRA="-L${EBROOTX11}/lib/ -lX11" LDFLAGS.GL="-L%(installdir)s/lib -lGL"'), - 'installopts': 'GLEW_PREFIX=%(installdir)s GLEW_DEST=%(installdir)s LIBDIR=%(installdir)s/lib ', - 'install_cmd': 'make install.all ', - }), - # MESA demos - offers the important command 'eglinfo' - ('demos', '95c1a57cfdd1ef2852c828cba4659a72575c5c5d', { - 'source_urls': [ - 'https://gitlab.freedesktop.org/mesa/demos/-/archive/%(version)s/', - ], - 'sources': [SOURCELOWER_TAR_GZ], - 'checksums': [ - '7738beca8f6f6981ba04c8a22fde24d69d6b2aaab1758ac695c9475bf704249c', - ], - 'preconfigopts': ('./autogen.sh && ' + - local_pkg_config + - 'GLEW_CFLAGS="-I%(installdir)s/include/" GLEW_LIBS="-L%(installdir)s/lib/ -lGLEW -lGL" ' - 'EGL_CFLAGS="-I%(installdir)s/include/" EGL_LIBS="-L%(installdir)s/lib/ -lEGL" '), - 'configopts': '--disable-osmesa ', - }), -] - -postinstallcmds = [ - 'cd %(installdir)s/lib && ln -sf libGL.so.1.7.0 libGL.so.1', - 'rm %(installdir)s/lib/*.la', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libEGL_nvidia.so.0 .', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libGLX_nvidia.so.0 .', - 'cd %(installdir)s/lib && ln -sf 
libGLX_mesa.so.0 libGLX_indirect.so.0', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libGLESv1_CM_nvidia.so.1 .', - 'cd %(installdir)s/lib && ln -sf ${EBROOTNVIDIA}/lib64/libGLESv2_nvidia.so.2 .', - # EGL vendor ICDs - ( - '{ cat > %(installdir)s/share/glvnd/egl_vendor.d/10_nvidia.json; } << \'EOF\'\n' - '{\n' - ' \"file_format_version\" : \"1.0.0\",\n' - ' \"ICD\" : {\n' - ' \"library_path\" : \"libEGL_nvidia.so.0\"\n' - ' }\n' - '}\n' - 'EOF' - ), - ( - '{ cat > %(installdir)s/share/glvnd/egl_vendor.d/50_mesa.json; } << \'EOF\'\n' - '{\n' - ' \"file_format_version\" : \"1.0.0\",\n' - ' \"ICD\" : {\n' - ' \"library_path\" : \"libEGL_mesa.so.0\"\n' - ' }\n' - '}\n' - 'EOF' - ), - # correct pkg-config of GLEW - 'sed -i "/^libdir=/c\libdir=\${exec_prefix}\/lib" %(installdir)s/lib/pkgconfig/glew.pc', - 'sed -i "/^prefix=/c\prefix=%(installdir)s" %(installdir)s/lib/pkgconfig/glew.pc', -] - -modextravars = { - '__EGL_VENDOR_LIBRARY_FILENAMES': ('%(installdir)s/share/glvnd/egl_vendor.d/10_nvidia.json:' - '%(installdir)s/share/glvnd/egl_vendor.d/50_mesa.json'), - 'EGL_PLATFORM': 'surfaceless', - 'EGL_DRIVER': 'swr', - 'EGL_LOG_LEVEL': 'fatal', - 'GALLIUM_DRIVER': 'swr', - 'KNOB_MAX_WORKER_THREADS': '65535', -} - -modluafooter = ''' -add_property("arch","gpu") - -conflict("Mesa") -conflict("libGLU") -''' - -sanity_check_paths = { - 'files': [ - 'lib/libEGL_mesa.%s' % SHLIB_EXT, 'lib/libOSMesa.%s' % SHLIB_EXT, - 'lib/libGLESv1_CM.%s' % SHLIB_EXT, 'lib/libGLESv2.%s' % SHLIB_EXT, - 'include/GL/glext.h', 'include/GL/glx.h', - 'include/GL/osmesa.h', 'include/GL/gl.h', 'include/GL/glxext.h', - 'include/GLES/gl.h', 'include/GLES2/gl2.h', 'include/GLES3/gl3.h', - 'lib/libOpenGL.%s' % SHLIB_EXT, - 'lib/libGLEW.a', 'lib/libGLEW.%s' % SHLIB_EXT, - 'bin/glewinfo', 'bin/visualinfo', - 'include/GL/glew.h', 'include/GL/glxew.h', 'include/GL/wglew.h', - ], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OpenImageDenoise/OpenImageDenoise-1.2.4-gcccoremkl-10.3.0-2021.2.0.eb b/Golden_Repo/o/OpenImageDenoise/OpenImageDenoise-1.2.4-gcccoremkl-10.3.0-2021.2.0.eb deleted file mode 100644 index 4d1b6e6348ade17d055138f3d75413104058e976..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenImageDenoise/OpenImageDenoise-1.2.4-gcccoremkl-10.3.0-2021.2.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'OpenImageDenoise' -version = '1.2.4' - -homepage = 'https://www.openimagedenoise.org/' -description = """ -Intel Open Image Denoise is an open source library of high-performance, -high-quality denoising filters for images rendered with ray tracing. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/OpenImageDenoise/oidn/releases/download/v%(version)s/'] -sources = ['oidn-%(version)s.src.tar.gz'] -checksums = ['948b070c780b5de0d983e7d5d37f6d9454932cc278913d9ee5b0bd047d23864a'] - -builddependencies = [ - ('ispc', '1.14.1', '', SYSTEM), - ('CMake', '3.18.0', '', SYSTEM), - ('Python', '3.8.5'), -] - -dependencies = [ - ('tbb', '2020.3'), -] - -separate_build_dir = True -start_dir = 'oidn-%(version)s' - -configopts = '-DOIDN_APPS:BOOL=OFF ' - -sanity_check_paths = { - 'dirs': ['include/OpenImageDenoise'], - 'files': ['lib/libOpenImageDenoise.so'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/OpenJPEG/OpenJPEG-2.3.1-GCCcore-10.3.0.eb b/Golden_Repo/o/OpenJPEG/OpenJPEG-2.3.1-GCCcore-10.3.0.eb deleted file mode 100644 index c1f7938e35dd00dd0fd4ba372ae318f17d7c44d9..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenJPEG/OpenJPEG-2.3.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,50 +0,0 @@ -# easybuild easyconfig -# -# John Dey jfdey@fredhutch.org -# -# Fred Hutchenson Cancer Research Center Seattle WA -easyblock = 'CMakeMake' - -name = 'OpenJPEG' -version = '2.3.1' - -homepage = 'https://www.openjpeg.org/' -description = """OpenJPEG is an open-source JPEG 2000 codec written in - C language. It has been developed in order to promote the use of JPEG 2000, - a still-image compression standard from the Joint Photographic Experts Group - (JPEG). Since may 2015, it is officially recognized by ISO/IEC and ITU-T as - a JPEG 2000 Reference Software.""" - -site_contacts = "a.strube@fz-juelich.de" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/uclouvain/openjpeg/archive'] -sources = ['v%(version)s.tar.gz'] -checksums = ['63f5a4713ecafc86de51bfad89cc07bb788e9bba24ebbf0c4ca637621aadb6a9'] - -separate_build_dir = True - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM) -] - -# for running the binary of openjpeg like opj_compress you need the libraries like zlib etc. -dependencies = [ - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0') -] - -sanity_check_paths = { - 'files': ['bin/opj_compress', - 'bin/opj_decompress', - 'bin/opj_dump', - 'include/openjpeg-%(version_major)s.%(version_minor)s/openjpeg.h', - 'lib/libopenjp2.%s' % SHLIB_EXT], - 'dirs': ['bin', 'include', 'lib'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/o/OpenJPEG/OpenJPEG-2.3.1-GCCcore-9.3.0.eb b/Golden_Repo/o/OpenJPEG/OpenJPEG-2.3.1-GCCcore-9.3.0.eb deleted file mode 100644 index 81f83b1813baf768ad0c1a942be13cd7951559fb..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenJPEG/OpenJPEG-2.3.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,50 +0,0 @@ -# easybuild easyconfig -# -# John Dey jfdey@fredhutch.org -# -# Fred Hutchenson Cancer Research Center Seattle WA -easyblock = 'CMakeMake' - -name = 'OpenJPEG' -version = '2.3.1' - -homepage = 'https://www.openjpeg.org/' -description = """OpenJPEG is an open-source JPEG 2000 codec written in - C language. It has been developed in order to promote the use of JPEG 2000, - a still-image compression standard from the Joint Photographic Experts Group - (JPEG). 
Since may 2015, it is officially recognized by ISO/IEC and ITU-T as - a JPEG 2000 Reference Software.""" - -site_contacts = "a.strube@fz-juelich.de" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/uclouvain/openjpeg/archive'] -sources = ['v%(version)s.tar.gz'] -checksums = ['63f5a4713ecafc86de51bfad89cc07bb788e9bba24ebbf0c4ca637621aadb6a9'] - -separate_build_dir = True - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0') -] - -# for running the binary of openjpeg like opj_compress you need the libraries like zlib etc. -dependencies = [ - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('LibTIFF', '4.1.0') -] - -sanity_check_paths = { - 'files': ['bin/opj_compress', - 'bin/opj_decompress', - 'bin/opj_dump', - 'include/openjpeg-%(version_major)s.%(version_minor)s/openjpeg.h', - 'lib/libopenjp2.%s' % SHLIB_EXT], - 'dirs': ['bin', 'include', 'lib'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb b/Golden_Repo/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb deleted file mode 100644 index de60398a7f90f6d60440323b05b621eb900a388c..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default OpenMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # OMPIO does not seem to work reliably on our system - 'OMPI_MCA_io': 'romio321', -} - -moduleclass = 'system' diff --git a/Golden_Repo/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb b/Golden_Repo/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb deleted file mode 100644 index 351d51b1883d91597e3f16a3bdcdc5d30f35bc3b..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA' - -homepage = '' -description = """This is a module to load the default OpenMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # OMPIO does not seem to work reliably on our system - 'OMPI_MCA_io': 'romio321', -} - -moduleclass = 'system' diff --git a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb b/Golden_Repo/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb deleted file mode 100644 index 0bea9988866163386aa1244b56e37a6ac00c9266..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 
'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.8.1', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index a849959f4044dca70ef4f16f22ad221fbfa03629..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.8.1', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += 
'--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-GCC-10.3.0.eb b/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-GCC-10.3.0.eb deleted file mode 100644 index 01038b7bec6981d5ff03bd112e6e42ae8d3cfbb8..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-GCC-10.3.0.eb +++ /dev/null @@ -1,63 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.1' - -homepage = 'https://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda'] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.4.1'), - ('UCX', '1.10.1', '', SYSTEM), - ('CUDA', '11.3', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", - "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-NVHPC-21.5-GCC-10.3.0.eb b/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-NVHPC-21.5-GCC-10.3.0.eb deleted file mode 100644 index 3e98dfaaa06cf83b4bee2c784aa100e38a059b2d..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-NVHPC-21.5-GCC-10.3.0.eb +++ /dev/null @@ -1,63 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.1' - -homepage = 'https://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '21.5-GCC-10.3.0'} 
-toolchainopts = {'pic': True} - -source_urls = [ - 'https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda'] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.4.1'), - ('UCX', '1.10.1', '', SYSTEM), - ('CUDA', '11.3', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", - "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 13c5bf27ac0a6f31d97e1cba6e285ba8c8b4b9b8..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OpenMPI/OpenMPI-4.1.1-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,63 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.1' - -homepage = 'https://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda'] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.4.1'), - ('UCX', '1.10.1', '', SYSTEM), - ('CUDA', '11.3', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external 
--with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", - "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -moduleclass = 'mpi' diff --git a/Golden_Repo/o/OptiX/OptiX-6.5.0.eb b/Golden_Repo/o/OptiX/OptiX-6.5.0.eb deleted file mode 100644 index 6232d58d8cf24e397ff21ada5b873c4d69284119..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/OptiX/OptiX-6.5.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild -# Authors:: Stephane Thiell <sthiell@stanford.edu> -## -easyblock = 'Binary' - -name = 'OptiX' -version = '6.5.0' - -homepage = 'https://developer.nvidia.com/optix' -description = """OptiX is NVIDIA SDK for easy ray tracing performance. - It provides a simple framework for accessing the GPU’s massive ray tracing - power using state-of-the-art GPU algorithms.""" - -toolchain = SYSTEM - -# Registration required. Download links: -# https://developer.nvidia.com/designworks/optix/download -# https://developer.nvidia.com/designworks/optix/downloads/legacy -sources = ['NVIDIA-OptiX-SDK-%(version)s-linux64.sh'] -checksums = ['eca09e617a267e18403ecccc715c5bc3a88729b81589a828fcb696435100a62e'] - -install_cmd = "./" + sources[0] + " --skip-license --prefix=%(installdir)s" - -sanity_check_paths = { - 'files': ["include/optix.h", "include/optix_cuda.h", "lib64/liboptix.%s" % SHLIB_EXT, - "lib64/liboptixu.%s" % SHLIB_EXT], - 'dirs': [] -} - -modextravars = {'OPTIX_HOME': '%(installdir)s'} - -moduleclass = 'vis' diff --git a/Golden_Repo/o/openvkl/openvkl-0.13.0-GCC-10.3.0.eb b/Golden_Repo/o/openvkl/openvkl-0.13.0-GCC-10.3.0.eb deleted file mode 100644 index bd22b8a1d764d27110163c0af4f72b1c91e81aed..0000000000000000000000000000000000000000 --- a/Golden_Repo/o/openvkl/openvkl-0.13.0-GCC-10.3.0.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'openvkl' -version = '0.13.0' - -homepage = 'http://www.openvkl.org/' -description = """ -Intel® Open Volume Kernel Library (Intel® Open VKL) is a collection -of high-performance volume computation kernels, developed at Intel. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openvkl/openvkl/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['974608259e3a5d8e29d2dfe81c6b2b1830aadeb9bbdc87127f3a7c8631e9f1bd'] - -builddependencies = [ - ('ispc', '1.14.1', '', SYSTEM), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('tbb', '2020.3'), - ('Embree', '3.12.2'), - ('rkcommon', '1.6.1'), -] - -separate_build_dir = True - -start_dir = '%(name)s-%(version)s' - -configopts = '-DBUILD_BENCHMARKS:BOOL=OFF ' -configopts += '-DBUILD_EXAMPLES:BOOL=OFF ' -configopts += '-DBUILD_TESTING:BOOL=OFF ' - -sanity_check_paths = { - 'dirs': ['include/openvkl'], - 'files': ['lib/libopenvkl.so'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PAPI/PAPI-6.0.0-GCCcore-10.3.0.eb b/Golden_Repo/p/PAPI/PAPI-6.0.0-GCCcore-10.3.0.eb deleted file mode 100644 index 7910f9650bc0606a1e8e52cc05bcc10f14aa9db5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PAPI/PAPI-6.0.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,54 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_07-02.html -## - -easyblock = 'ConfigureMake' - -name = 'PAPI' -version = '6.0.0' - -homepage = 'http://icl.cs.utk.edu/projects/papi/' -description = """PAPI provides the tool designer and application engineer with a consistent interface and -methodology for use of the performance counter hardware found in most major microprocessors. PAPI enables -software engineers to see, in near real time, the relation between software performance and processor events. -In addition Component PAPI provides access to a collection of components -that expose performance measurement opportunites across the hardware and software stack. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -configopts = "--with-components=rapl " # for energy measurements - -source_urls = ['http://icl.cs.utk.edu/projects/papi/downloads/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1') -] - -start_dir = 'src' - -# parallel build doesn't always work -parallel = 1 - -runtest = 'fulltest' - -sanity_check_paths = { - 'files': ["bin/papi_%s" % x for x in ["avail", "clockres", "command_line", "component_avail", - "cost", "decode", "error_codes", "event_chooser", - "mem_info", "multiplex_cost", "native_avail", "version", - "xml_event_info"]], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/p/PAPI/PAPI-6.0.0-GCCcore-9.3.0.eb b/Golden_Repo/p/PAPI/PAPI-6.0.0-GCCcore-9.3.0.eb deleted file mode 100644 index 511df258082158427a9877b196aacf918554ce6a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PAPI/PAPI-6.0.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,54 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_07-02.html -## - -easyblock = 'ConfigureMake' - -name = 'PAPI' -version = '6.0.0' - -homepage = 'http://icl.cs.utk.edu/projects/papi/' -description = """PAPI provides the tool designer and application engineer with a consistent interface and -methodology for use of the performance counter hardware found in most major microprocessors. PAPI enables -software engineers to see, in near real time, the relation between software performance and processor events. -In addition Component PAPI provides access to a collection of components -that expose performance measurement opportunites across the hardware and software stack. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -configopts = "--with-components=rapl " # for energy measurements - -source_urls = ['http://icl.cs.utk.edu/projects/papi/downloads/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34') -] - -start_dir = 'src' - -# parallel build doesn't always work -parallel = 1 - -runtest = 'fulltest' - -sanity_check_paths = { - 'files': ["bin/papi_%s" % x for x in ["avail", "clockres", "command_line", "component_avail", - "cost", "decode", "error_codes", "event_chooser", - "mem_info", "multiplex_cost", "native_avail", "version", - "xml_event_info"]], - 'dirs': [], -} - -moduleclass = 'perf' diff --git a/Golden_Repo/p/PCRE/PCRE-8.44-GCCcore-10.3.0.eb b/Golden_Repo/p/PCRE/PCRE-8.44-GCCcore-10.3.0.eb deleted file mode 100644 index 27f8f2c5e15b259a46ea3d278dab8712965c4425..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PCRE/PCRE-8.44-GCCcore-10.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PCRE' -version = '8.44' - -homepage = 'http://www.pcre.org/' -description = """ -The PCRE library is a set of functions that implement regular expression pattern matching using the same syntax -and semantics as Perl 5. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [('binutils', '2.36.1')] - -configopts = "--with-pic --disable-cpp --enable-utf --enable-unicode-properties" - -moduleclass = 'devel' diff --git a/Golden_Repo/p/PCRE/PCRE-8.44-GCCcore-9.3.0.eb b/Golden_Repo/p/PCRE/PCRE-8.44-GCCcore-9.3.0.eb deleted file mode 100644 index db9cf77e36d556b1ea11fdf51a182b01d382ef12..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PCRE/PCRE-8.44-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PCRE' -version = '8.44' - -homepage = 'http://www.pcre.org/' -description = """ -The PCRE library is a set of functions that implement regular expression pattern matching using the same syntax -and semantics as Perl 5. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [('binutils', '2.34')] - -configopts = "--with-pic --disable-cpp --enable-utf --enable-unicode-properties" - -moduleclass = 'devel' diff --git a/Golden_Repo/p/PCRE2/PCRE2-10.34-GCCcore-10.3.0.eb b/Golden_Repo/p/PCRE2/PCRE2-10.34-GCCcore-10.3.0.eb deleted file mode 100644 index af8dad5eac9955993b90a03c3a839c9e00899aa0..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PCRE2/PCRE2-10.34-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PCRE2' -version = '10.34' - -homepage = 'https://www.pcre.org/' -description = """ - The PCRE library is a set of functions that implement regular expression - pattern matching using the same syntax and semantics as Perl 5. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://download.sourceforge.net/pcre'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['da6aba7ba2509e918e41f4f744a59fa41a2425c59a298a232e7fe85691e00379'] - -builddependencies = [('binutils', '2.36.1')] - -configopts = "--enable-shared --enable-jit --enable-pcre2-16 --enable-unicode" - -sanity_check_paths = { - 'files': ["bin/pcre2-config", "bin/pcre2grep", "bin/pcre2test", - "lib/libpcre2-8.a", "lib/libpcre2-16.a"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/PCRE2/PCRE2-10.34-GCCcore-9.3.0.eb b/Golden_Repo/p/PCRE2/PCRE2-10.34-GCCcore-9.3.0.eb deleted file mode 100644 index 0be52fab60b3ac26e1df90a129c94cf78ba17ad8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PCRE2/PCRE2-10.34-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PCRE2' -version = '10.34' - -homepage = 'https://www.pcre.org/' -description = """ - The PCRE library is a set of functions that implement regular expression - pattern matching using the same syntax and semantics as Perl 5. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://download.sourceforge.net/pcre'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['da6aba7ba2509e918e41f4f744a59fa41a2425c59a298a232e7fe85691e00379'] - -builddependencies = [('binutils', '2.34')] - -configopts = "--enable-shared --enable-jit --enable-pcre2-16 --enable-unicode" - -sanity_check_paths = { - 'files': ["bin/pcre2-config", "bin/pcre2grep", "bin/pcre2test", - "lib/libpcre2-8.a", "lib/libpcre2-16.a"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/PDT/PDT-3.25-GCCcore-9.3.0.eb b/Golden_Repo/p/PDT/PDT-3.25-GCCcore-9.3.0.eb deleted file mode 100644 index 8ed6fdc23508b54e0b362debc6c244e66216ba33..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PDT/PDT-3.25-GCCcore-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -name = 'PDT' -version = '3.25' - -homepage = 'http://www.cs.uoregon.edu/research/pdt/' -description = """ -Program Database Toolkit (PDT) is a framework for analyzing source code -written in several programming languages and for making rich program -knowledge accessible to developers of static and dynamic analysis tools. -PDT implements a standard program representation, the program database -(PDB), that can be accessed in a uniform way through a class library -supporting common PDB operations. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['http://tau.uoregon.edu/pdt_releases/'] -sources = ['pdtoolkit-%(version)s.tar.gz'] -checksums = [ - '1037628d854edfeded3d847150d3e8fbd3774e8146407ce32f5021c80f6299be', # pdtoolkit-3.25.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.34'), -] - -moduleclass = 'perf' diff --git a/Golden_Repo/p/PDT/PDT-3.25.1-GCCcore-10.3.0.eb b/Golden_Repo/p/PDT/PDT-3.25.1-GCCcore-10.3.0.eb deleted file mode 100644 index 6560a829d567c63a5b0a5b96f5ac116024ca38f0..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PDT/PDT-3.25.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -name = 'PDT' -version = '3.25.1' - -homepage = 'http://www.cs.uoregon.edu/research/pdt/' -description = """ -Program Database Toolkit (PDT) is a framework for analyzing source code -written in several programming languages and for making rich program -knowledge accessible to developers of static and dynamic analysis tools. -PDT implements a standard program representation, the program database -(PDB), that can be accessed in a uniform way through a class library -supporting common PDB operations. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://tau.uoregon.edu/pdt_releases/'] -sources = ['pdtoolkit-%(version)s.tar.gz'] -checksums = [ - '0b6f8a6b8769c181b2ae6cae7298f04b8e3e3d68066f598ed24574e19500bc97', # pdtoolkit-3.25.1.tar.gz -] - -builddependencies = [ - # use same binutils version that was used when building GCCcore - ('binutils', '2.36.1'), -] - -moduleclass = 'perf' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-gomkl-2020.eb b/Golden_Repo/p/PETSc/PETSc-3.14-gomkl-2020.eb deleted file mode 100644 index 25e77de81f2d02c9586616d841921a5952d54c28..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-gomkl-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -name = "PETSc" -version = "3.14" - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with default 4-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('METIS', '5.1.0'), - ('ParMETIS', '4.0.3'), -] - -download_deps_static = [ - 'hypre', - 'spooles', - 'superlu', - 'superlu_dist', - 'mumps', - 'spai', - 'chaco', - 'sundials', - 'parms', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020-complex.eb b/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020-complex.eb deleted file mode 100644 index f7513363252d5d9a3d4c6c75706ec3694ba30b10..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020-complex.eb +++ /dev/null @@ -1,59 +0,0 @@ -name = "PETSc" -version = "3.14" -versionsuffix = '-complex' - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with complex numbers only and with default 4-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('METIS', '5.1.0'), - ('ParMETIS', '4.0.3'), -] - -download_deps_static = [ - 'spooles', - 'superlu', - 'superlu_dist', - 'mumps', - 'chaco', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' -configopts += '--with-scalar-type=complex ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020-int8.eb b/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020-int8.eb deleted file mode 100644 index b617483ebaf045773ad2c7a8d51db065000044e4..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020-int8.eb +++ /dev/null @@ -1,57 +0,0 @@ -name = "PETSc" -version = "3.14" -versionsuffix = '-int8' - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with 8-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), -] - -download_deps_static = [ - 'hypre', - 'metis', - 'parmetis', - 'spooles', - 'superlu_dist', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' -configopts += '--with-64-bit-indices=1 ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020.eb b/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020.eb deleted file mode 100644 index 0fb1c75f9ca629a159592f669601ac481b9d0a5e..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-gpsmkl-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -name = "PETSc" -version = "3.14" - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. 
- -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with default 4-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('METIS', '5.1.0'), - ('ParMETIS', '4.0.3'), -] - -download_deps_static = [ - 'hypre', - 'spooles', - 'superlu', - 'superlu_dist', - 'mumps', - 'spai', - 'chaco', - 'sundials', - 'parms', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020-complex.eb b/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020-complex.eb deleted file mode 100644 index ccee623724abefa6313540894776f71c4c88fcae..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020-complex.eb +++ /dev/null @@ -1,59 +0,0 @@ -name = "PETSc" -version = "3.14" -versionsuffix = '-complex' - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with complex numbers only and with default 4-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('METIS', '5.1.0'), - ('ParMETIS', '4.0.3'), -] - -download_deps_static = [ - 'spooles', - 'superlu', - 'superlu_dist', - 'mumps', - 'chaco', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' -configopts += '--with-scalar-type=complex ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020-int8.eb b/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020-int8.eb deleted file mode 100644 index 0d6b33e1e16016c22782c15f7fadc0f0319c7bf9..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020-int8.eb +++ /dev/null @@ -1,57 +0,0 @@ -name = "PETSc" -version = "3.14" -versionsuffix = '-int8' - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with 8-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), -] - -download_deps_static = [ - 'hypre', - 'metis', - 'parmetis', - 'spooles', - 'superlu_dist', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' -configopts += '--with-64-bit-indices=1 ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020.eb b/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020.eb deleted file mode 100644 index fbcbb0d5d56e63d24bde89890c3383fc7c2623da..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-intel-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -name = "PETSc" -version = "3.14" - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. 
- -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with default 4-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('METIS', '5.1.0'), - ('ParMETIS', '4.0.3'), -] - -download_deps_static = [ - 'hypre', - 'spooles', - 'superlu', - 'superlu_dist', - 'mumps', - 'spai', - 'chaco', - 'sundials', - 'parms', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020-complex.eb b/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020-complex.eb deleted file mode 100644 index 692b7bd1419f126d6a68d99d679dd0bd612b5952..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020-complex.eb +++ /dev/null @@ -1,59 +0,0 @@ -name = "PETSc" -version = "3.14" -versionsuffix = '-complex' - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with complex numbers only and with default 4-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('METIS', '5.1.0'), - ('ParMETIS', '4.0.3'), -] - -download_deps_static = [ - 'spooles', - 'superlu', - 'superlu_dist', - 'mumps', - 'chaco', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' -configopts += '--with-scalar-type=complex ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020-int8.eb b/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020-int8.eb deleted file mode 100644 index 45748e19859d74c75350e80bce4b5dfafff364fe..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020-int8.eb +++ /dev/null @@ -1,57 +0,0 @@ -name = "PETSc" -version = "3.14" -versionsuffix = '-int8' - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with 8-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), -] - -download_deps_static = [ - 'hypre', - 'metis', - 'parmetis', - 'spooles', - 'superlu_dist', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' -configopts += '--with-64-bit-indices=1 ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020.eb b/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020.eb deleted file mode 100644 index e5045ace6debeb196d4b8e69aae2090d548565a5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PETSc/PETSc-3.14-intel-para-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -name = "PETSc" -version = "3.14" - -homepage = 'http://www.mcs.anl.gov/petsc' -description = """PETSc, pronounced PET-see (the S is silent), is a suite -of data structures and routines for the scalable (parallel) solution -of scientific applications modeled by partial differential equations. - -This version is configured with several downloads of other libraries, -with --with-large-file-io and no debugging. It is a C and Fortran -version with default 4-Byte integer values. - -For more information see $PETSC_DIR/lib/petsc/conf/configure-hash. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} - -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['http://ftp.mcs.anl.gov/pub/petsc/release-snapshots'] -sources = ['petsc-lite-%s.tar.gz' % version] - -builddependencies = [ - ('CMake', '3.18.0') -] - -download_deps = [ - 'triangle', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('METIS', '5.1.0'), - ('ParMETIS', '4.0.3'), -] - -download_deps_static = [ - 'hypre', - 'spooles', - 'superlu', - 'superlu_dist', - 'mumps', - 'spai', - 'chaco', - 'sundials', - 'parms', -] - -configopts = '--with-large-file-io --with-cxx-dialect=C++11 ' - -shared_libs = 1 - -postinstallcmds = [ - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/mpi/mpiaij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/aij/seq/aij.h %(installdir)s/include/petsc/private/', - 'cp %(builddir)s/petsc-%(version)s.0/src/mat/impls/dense/seq/dense.h %(installdir)s/include/petsc/private/' -] - -moduleclass = 'numlib' diff --git a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-gomkl-2020.eb b/Golden_Repo/p/PLUMED/PLUMED-2.6.1-gomkl-2020.eb deleted file mode 100644 index 0bde13107e957d879d5b9077c0ed5b0bf7757f0e..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-gomkl-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -# by Ward Poelmans <wpoely86@gmail.com> - -easyblock = 'ConfigureMake' - -name = 'PLUMED' -version = '2.6.1' - -homepage = 'http://www.plumed.org' -description = """PLUMED is an open source library for free energy calculations in molecular systems which - works together with some of the most popular molecular dynamics engines. Free energy calculations can be - performed as a function of many order parameters with a particular focus on biological problems, using - state of the art methods such as metadynamics, umbrella sampling and Jarzynski-equation based steered MD. - The software, written in C++, can be easily interfaced with both fortran and C/C++ codes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'usempi': 'True'} - -source_urls = ['https://github.com/plumed/plumed2/archive/'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('zlib', '1.2.11'), - ('GSL', '2.6'), - ('libmatheval', '1.1.11'), -] - -preconfigopts = 'env FC=$MPIF90 LIBS="$LIBLAPACK $LIBS" ' -configopts = ' --exec-prefix=%(installdir)s --enable-gsl --enable-modules=all' -prebuildopts = 'source sourceme.sh && ' - -sanity_check_paths = { - 'files': ['bin/plumed', 'lib/libplumedKernel.%s' % SHLIB_EXT, 'lib/libplumed.%s' % SHLIB_EXT], - 'dirs': ['lib/plumed'] -} - -modextrapaths = { - 'PLUMED_KERNEL': 'lib/libplumedKernel.%s' % SHLIB_EXT, - 'PLUMED_ROOT': 'lib/plumed', -} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-gpsmkl-2020.eb b/Golden_Repo/p/PLUMED/PLUMED-2.6.1-gpsmkl-2020.eb deleted file mode 100644 index d997250a1c944cf619bdc678fef1d14b96a2d1d4..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-gpsmkl-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -# by Ward Poelmans <wpoely86@gmail.com> - -easyblock = 'ConfigureMake' - -name = 'PLUMED' -version = '2.6.1' - -homepage = 'http://www.plumed.org' -description = """PLUMED is an open source library for free energy calculations in molecular systems which - works together with some of the most popular molecular dynamics engines. 
Free energy calculations can be - performed as a function of many order parameters with a particular focus on biological problems, using - state of the art methods such as metadynamics, umbrella sampling and Jarzynski-equation based steered MD. - The software, written in C++, can be easily interfaced with both fortran and C/C++ codes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': 'True'} - -source_urls = ['https://github.com/plumed/plumed2/archive/'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('zlib', '1.2.11'), - ('GSL', '2.6'), - ('libmatheval', '1.1.11'), -] - -preconfigopts = 'env FC=$MPIF90 LIBS="$LIBLAPACK $LIBS" ' -configopts = ' --exec-prefix=%(installdir)s --enable-gsl --enable-modules=all' -prebuildopts = 'source sourceme.sh && ' - -sanity_check_paths = { - 'files': ['bin/plumed', 'lib/libplumedKernel.%s' % SHLIB_EXT, 'lib/libplumed.%s' % SHLIB_EXT], - 'dirs': ['lib/plumed'] -} - -modextrapaths = { - 'PLUMED_KERNEL': 'lib/libplumedKernel.%s' % SHLIB_EXT, - 'PLUMED_ROOT': 'lib/plumed', -} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-intel-2020.eb b/Golden_Repo/p/PLUMED/PLUMED-2.6.1-intel-2020.eb deleted file mode 100644 index 5536f845ad563adaf039f550d138d404d19ce1ed..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-intel-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -# by Ward Poelmans <wpoely86@gmail.com> - -easyblock = 'ConfigureMake' - -name = 'PLUMED' -version = '2.6.1' - -homepage = 'http://www.plumed.org' -description = """PLUMED is an open source library for free energy calculations in molecular systems which - works together with some of the most popular molecular dynamics engines. Free energy calculations can be - performed as a function of many order parameters with a particular focus on biological problems, using - state of the art methods such as metadynamics, umbrella sampling and Jarzynski-equation based steered MD. - The software, written in C++, can be easily interfaced with both fortran and C/C++ codes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'usempi': 'True'} - -source_urls = ['https://github.com/plumed/plumed2/archive/'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('zlib', '1.2.11'), - ('GSL', '2.6'), - ('libmatheval', '1.1.11'), -] - -preconfigopts = 'env FC=$MPIF90 LIBS="$LIBLAPACK $LIBS" ' -configopts = ' --exec-prefix=%(installdir)s --enable-gsl --enable-modules=all' -prebuildopts = 'source sourceme.sh && ' - -sanity_check_paths = { - 'files': ['bin/plumed', 'lib/libplumedKernel.%s' % SHLIB_EXT, 'lib/libplumed.%s' % SHLIB_EXT], - 'dirs': ['lib/plumed'] -} - -modextrapaths = { - 'PLUMED_KERNEL': 'lib/libplumedKernel.%s' % SHLIB_EXT, - 'PLUMED_ROOT': 'lib/plumed', -} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-intel-para-2020.eb b/Golden_Repo/p/PLUMED/PLUMED-2.6.1-intel-para-2020.eb deleted file mode 100644 index 480330f9a8f7a990d57c83ca1035e66c33120604..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-intel-para-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -# by Ward Poelmans <wpoely86@gmail.com> - -easyblock = 'ConfigureMake' - -name = 'PLUMED' -version = '2.6.1' - -homepage = 'http://www.plumed.org' -description = """PLUMED is an open source library for free energy calculations in molecular systems which - works together with some of the most popular molecular dynamics engines. 
Free energy calculations can be - performed as a function of many order parameters with a particular focus on biological problems, using - state of the art methods such as metadynamics, umbrella sampling and Jarzynski-equation based steered MD. - The software, written in C++, can be easily interfaced with both fortran and C/C++ codes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': 'True'} - -source_urls = ['https://github.com/plumed/plumed2/archive/'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('zlib', '1.2.11'), - ('GSL', '2.6'), - ('libmatheval', '1.1.11'), -] - -preconfigopts = 'env FC=$MPIF90 LIBS="$LIBLAPACK $LIBS" ' -configopts = ' --exec-prefix=%(installdir)s --enable-gsl --enable-modules=all' -prebuildopts = 'source sourceme.sh && ' - -sanity_check_paths = { - 'files': ['bin/plumed', 'lib/libplumedKernel.%s' % SHLIB_EXT, 'lib/libplumed.%s' % SHLIB_EXT], - 'dirs': ['lib/plumed'] -} - -modextrapaths = { - 'PLUMED_KERNEL': 'lib/libplumedKernel.%s' % SHLIB_EXT, - 'PLUMED_ROOT': 'lib/plumed', -} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-iomkl-2020.eb b/Golden_Repo/p/PLUMED/PLUMED-2.6.1-iomkl-2020.eb deleted file mode 100644 index 816e14c0c7ed61e50af913815f3a00094e45afef..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PLUMED/PLUMED-2.6.1-iomkl-2020.eb +++ /dev/null @@ -1,44 +0,0 @@ -# by Ward Poelmans <wpoely86@gmail.com> - -easyblock = 'ConfigureMake' - -name = 'PLUMED' -version = '2.6.1' - -homepage = 'http://www.plumed.org' -description = """PLUMED is an open source library for free energy calculations in molecular systems which - works together with some of the most popular molecular dynamics engines. Free energy calculations can be - performed as a function of many order parameters with a particular focus on biological problems, using - state of the art methods such as metadynamics, umbrella sampling and Jarzynski-equation based steered MD. - The software, written in C++, can be easily interfaced with both fortran and C/C++ codes. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'usempi': 'True'} - -source_urls = ['https://github.com/plumed/plumed2/archive/'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('zlib', '1.2.11'), - ('GSL', '2.6'), - ('libmatheval', '1.1.11'), -] - -preconfigopts = 'env FC=$MPIF90 LIBS="$LIBLAPACK $LIBS" ' -configopts = ' --exec-prefix=%(installdir)s --enable-gsl --enable-modules=all' -prebuildopts = 'source sourceme.sh && ' - -sanity_check_paths = { - 'files': ['bin/plumed', 'lib/libplumedKernel.%s' % SHLIB_EXT, 'lib/libplumed.%s' % SHLIB_EXT], - 'dirs': ['lib/plumed'] -} - -modextrapaths = { - 'PLUMED_KERNEL': 'lib/libplumedKernel.%s' % SHLIB_EXT, - 'PLUMED_ROOT': 'lib/plumed', -} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PLUMED/PLUMED-2.7.0-gpsmpi-2020.eb b/Golden_Repo/p/PLUMED/PLUMED-2.7.0-gpsmpi-2020.eb deleted file mode 100644 index bcc374e01dc40cf599a9ac9db1b1b36effa9a958..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PLUMED/PLUMED-2.7.0-gpsmpi-2020.eb +++ /dev/null @@ -1,46 +0,0 @@ -# Built with EasyBuild version 4.3.1 on 2021-01-09_13-02-25 -# by Ward Poelmans <wpoely86@gmail.com> - -easyblock = 'ConfigureMake' - -name = 'PLUMED' -version = '2.7.0' - -homepage = 'http://www.plumed.org' -description = """PLUMED is an open source library for free energy calculations in molecular systems -which works together with some of the most popular molecular dynamics engines. Free energy -calculations can be performed as a function of many order parameters with a particular focus on -biological problems, using state of the art methods such as metadynamics, umbrella sampling and -Jarzynski-equation based steered MD. The software, written in C++, can be easily interfaced with -both Fortran and C/C++ codes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': 'True'} - -source_urls = ['https://github.com/plumed/plumed2/archive/'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('zlib', '1.2.11'), - ('GSL', '2.6'), - ('libmatheval', '1.1.11'), -] - -preconfigopts = 'env FC=$MPIF90 LIBS="$LIBLAPACK $LIBS" ' -configopts = ' --exec-prefix=%(installdir)s --enable-gsl --enable-modules=all' -prebuildopts = 'source sourceme.sh && ' - -sanity_check_paths = { - 'files': ['bin/plumed', 'lib/libplumedKernel.%s' % SHLIB_EXT, 'lib/libplumed.%s' % SHLIB_EXT], - 'dirs': ['lib/plumed'] -} - -modextrapaths = { - 'PLUMED_KERNEL': 'lib/libplumedKernel.%s' % SHLIB_EXT, - 'PLUMED_ROOT': 'lib/plumed', -} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PLUMED/plumed_ld.gold.patch b/Golden_Repo/p/PLUMED/plumed_ld.gold.patch deleted file mode 100644 index c80feaaf5fc959597f8087721df133ffa81c2351..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PLUMED/plumed_ld.gold.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -ruN plumed2-2.4.3.orig/configure plumed2-2.4.3/configure ---- plumed2-2.4.3.orig/configure 2018-10-15 14:19:18.282735508 +0200 -+++ plumed2-2.4.3/configure 2018-10-15 14:19:33.824067213 +0200 -@@ -8785,7 +8785,7 @@ - - LD_RO= - if test "$ld_r" == true ; then --for test_LD_RO in "$($CXX --print-prog-name=ld) -r -o" "$CXX -Wl,-r -o" "ld -r -o" -+for test_LD_RO in "$($CXX --print-prog-name=ld.gold) -r -o" "$CXX -Wl,-r -o" "ld -r -o" - do - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C++ objects can be grouped with $test_LD_RO" >&5 - $as_echo_n "checking whether C++ objects can be grouped with $test_LD_RO... 
" >&6; } diff --git a/Golden_Repo/p/PROJ/PROJ-7.1.0-GCCcore-10.3.0.eb b/Golden_Repo/p/PROJ/PROJ-7.1.0-GCCcore-10.3.0.eb deleted file mode 100644 index 25d95aa254817819750b6b5ca128cd6230be1839..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PROJ/PROJ-7.1.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2014-2015 The Cyprus Institute -# Authors:: Thekla Loizou <t.loizou@cyi.ac.cy> -# License:: MIT/GPL -# -## -easyblock = 'ConfigureMake' - -name = 'PROJ' -version = '7.1.0' - -homepage = 'http://trac.osgeo.org/proj/' -description = """Program proj is a standard Unix filter function which converts -geographic longitude and latitude coordinates into cartesian coordinates -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://download.osgeo.org/proj/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('SQLite', '3.32.3'), - ('LibTIFF', '4.1.0'), - ('cURL', '7.71.1'), -] - -sanity_check_paths = { - 'files': ['bin/cct', 'bin/cs2cs', 'bin/geod', 'bin/gie', 'bin/proj', - 'bin/projinfo', 'lib/libproj.a', 'lib/libproj.%s' % SHLIB_EXT], - 'dirs': ['include'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/p/PROJ/PROJ-7.1.0-GCCcore-9.3.0.eb b/Golden_Repo/p/PROJ/PROJ-7.1.0-GCCcore-9.3.0.eb deleted file mode 100644 index d602b251f9816672035a0502ddefda40aae0362f..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PROJ/PROJ-7.1.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2014-2015 The Cyprus Institute -# Authors:: Thekla Loizou <t.loizou@cyi.ac.cy> -# License:: MIT/GPL -# -## -easyblock = 'ConfigureMake' - -name = 'PROJ' -version = '7.1.0' - -homepage = 'http://trac.osgeo.org/proj/' -description = """Program proj is a standard Unix filter function which converts -geographic longitude and latitude coordinates into cartesian coordinates -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://download.osgeo.org/proj/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), - ('SQLite', '3.32.3'), - ('LibTIFF', '4.1.0'), - ('cURL', '7.71.1'), -] - -sanity_check_paths = { - 'files': ['bin/cct', 'bin/cs2cs', 'bin/geod', 'bin/gie', 'bin/proj', - 'bin/projinfo', 'lib/libproj.a', 'lib/libproj.%s' % SHLIB_EXT], - 'dirs': ['include'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/p/Pandoc/Pandoc-2.11.0.4.eb b/Golden_Repo/p/Pandoc/Pandoc-2.11.0.4.eb deleted file mode 100644 index b8586236691c42291f18c21fe33b9cae4d767fea..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Pandoc/Pandoc-2.11.0.4.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'Tarball' - -name = 'Pandoc' -version = '2.11.0.4' - -homepage = 'http://pandoc.org' -description = """ -If you need to convert files from one markup format into another, -pandoc is your swiss-army knife""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://github.com/jgm/pandoc/releases/download/%(version)s'] -sources = ['pandoc-%(version)s-linux-amd64.tar.gz'] -checksums = ['94e070f3eff83fa0692461047dbf7bcd0f58d96f3235c66d7366a22e42786ac0'] - -sanity_check_paths = { - 'files': ['bin/pandoc'], - 'dirs': 
['share'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/Pango/Pango-1.44.7-GCCcore-10.3.0.eb b/Golden_Repo/p/Pango/Pango-1.44.7-GCCcore-10.3.0.eb deleted file mode 100644 index c9aed81f07cb84ac35c8799a39e3ac5ffa6b7652..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Pango/Pango-1.44.7-GCCcore-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'Pango' -version = '1.44.7' - -homepage = 'http://www.pango.org/' -description = """Pango is a library for laying out and rendering of text, with an emphasis on internationalization. -Pango can be used anywhere that text layout is needed, though most of the work on Pango so far has been done in the -context of the GTK+ widget toolkit. Pango forms the core of text and font handling for GTK+-2.x. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Coreutils', '8.32'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('X11', '20200222'), - ('GLib', '2.64.4'), - ('cairo', '1.17.2'), - ('HarfBuzz', '2.6.7'), - ('FriBidi', '1.0.9'), -] - -configopts = '-Ddefault_library=both' - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/Pango/Pango-1.44.7-GCCcore-9.3.0.eb b/Golden_Repo/p/Pango/Pango-1.44.7-GCCcore-9.3.0.eb deleted file mode 100644 index afeb27008505dfb389520e86d52e41af38fc355c..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Pango/Pango-1.44.7-GCCcore-9.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'MesonNinja' - -name = 'Pango' -version = '1.44.7' - -homepage = 'http://www.pango.org/' -description = """Pango is a library for laying out and rendering of text, with an emphasis on internationalization. -Pango can be used anywhere that text layout is needed, though most of the work on Pango so far has been done in the -context of the GTK+ widget toolkit. Pango forms the core of text and font handling for GTK+-2.x. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [FTPGNOME_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -builddependencies = [ - ('binutils', '2.34'), - ('Coreutils', '8.32'), - ('GObject-Introspection', '1.64.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('X11', '20200222'), - ('GLib', '2.64.4'), - ('cairo', '1.17.2'), - ('HarfBuzz', '2.6.7'), - ('FriBidi', '1.0.9'), -] - -configopts = '-Ddefault_library=both' - -modextrapaths = { - 'GI_TYPELIB_PATH': 'lib64/girepository-1.0', - 'XDG_DATA_DIRS': 'share', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/Panoply/Panoply-4.11.6.eb b/Golden_Repo/p/Panoply/Panoply-4.11.6.eb deleted file mode 100644 index 9223d2baa90325b771507e0c7a47e3ead24ed45c..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Panoply/Panoply-4.11.6.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'PackedBinary' - -name = 'Panoply' -version = '4.11.6' - -homepage = 'https://www.giss.nasa.gov/tools/panoply' -description = "Panoply plots geo-referenced and other arrays from netCDF, HDF, GRIB, and other datasets." 
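A brief aside on the `%(...)s` placeholders that recur in these recipes (Pandoc just above, Panoply just below): EasyBuild resolves them from the easyconfig's own parameters before any build step runs. The sketch below is illustrative only — ordinary Python mapping substitution standing in for the framework's template resolution — with values copied from the deleted Pandoc recipe.

```python
# Illustrative only: stand-in for EasyBuild's %(...)s template resolution,
# using parameter values taken from the deleted Pandoc 2.11.0.4 recipe.
template_values = {'name': 'Pandoc', 'version': '2.11.0.4'}

source_url = 'https://github.com/jgm/pandoc/releases/download/%(version)s' % template_values
source_file = 'pandoc-%(version)s-linux-amd64.tar.gz' % template_values

print(source_url)   # https://github.com/jgm/pandoc/releases/download/2.11.0.4
print(source_file)  # pandoc-2.11.0.4-linux-amd64.tar.gz
```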
- -site_contacts = 'a.ghasemi@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['%(name)sJ-%(version)s.tgz'] -source_urls = ['https://www.giss.nasa.gov/tools/panoply/download/'] - -dependencies = [ - ('Java', '15', '', SYSTEM), -] - -postinstallcmds = [ - 'mkdir %(installdir)s/bin', - 'mv %(installdir)s/panoply.sh %(installdir)s/bin', - 'sed -i "s/jars/..\/jars/g" %(installdir)s/bin/panoply.sh', - 'ln -s %(installdir)s/bin/panoply.sh %(installdir)s/bin/panoply', -] - -sanity_check_paths = { - 'files': ['bin/panoply'], - 'dirs': ['jars'] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2020-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2020-double.eb deleted file mode 100644 index 91522a15e93f6e58df9f60b95f6ef37247a2b861..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2020-double.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2020.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2020.eb deleted file mode 100644 index 9368785d588974757eadccdc941bdfa6b080292f..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2021-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2021-double.eb deleted file mode 100644 index 014ab450e041efca9cba9b71a4925ceb202fe16a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2021-double.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] - -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2021.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2021.eb deleted file mode 100644 index bd05903f5eb5ac74a9a80e576f9a366126d1b9b6..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gompi-2021.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2020-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2020-double.eb deleted file mode 100644 index 63b61195f0b3d84a3913aa4f344d988c2d6c59a8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2020-double.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2020.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2020.eb deleted file mode 100644 index 37a67877a5a735cb850a0fa00f46cb16213bc7a1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2021-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2021-double.eb deleted file mode 100644 index 5c83bbdf683b8f4e08383a46a47775be6db4280a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2021-double.eb +++ /dev/null @@ -1,38 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2021.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2021.eb deleted file mode 100644 index 428110684c473ffef24f6bd1aa277147352a2d20..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-gpsmpi-2021.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2020-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2020-double.eb deleted file mode 100644 index 6d6eeb7fa8fbf29c8be500a5ccfdc0be8d318b96..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2020-double.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2020.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2020.eb deleted file mode 100644 index 5db53ba5a6036289581b7d93a230f9517195f739..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2021-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2021-double.eb deleted file mode 100644 index 4f4ea0d6be58c4f148da7c481efc3436079d4705..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2021-double.eb +++ /dev/null @@ -1,38 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2021.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2021.eb deleted file mode 100644 index 71b7e6b8ef9cb03929c5a78806392ed7d4aa2841..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iimpi-2021.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2020-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2020-double.eb deleted file mode 100644 index b34f369c5d5b0e01153abdcf4b916090f4524ef5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2020-double.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2020.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2020.eb deleted file mode 100644 index 75d7aeeb87a40ec38ff326f2e1bd3c64e1258144..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2021-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2021-double.eb deleted file mode 100644 index 85df73541748f7108b33852551406e240cf10f74..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2021-double.eb +++ /dev/null @@ -1,38 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2021.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2021.eb deleted file mode 100644 index cc2de9adab5aa940b3ad06735d484e63ae162dc8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-iompi-2021.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2020-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2020-double.eb deleted file mode 100644 index 4526c49c28509445f69b77e33b89a47fde906982..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2020-double.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2020.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2020.eb deleted file mode 100644 index d9652f05594339d16bfb4c9a211b9e34e6c20dfc..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis', - 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2021-double.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2021-double.eb deleted file mode 100644 index ab9cbf6bd613784aadff9b3192abbe6d471e8230..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2021-double.eb +++ /dev/null @@ -1,38 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' -versionsuffix = '-double' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch', - # Patch for double precision - 'parmetis-4.0.3-double.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2021.eb b/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2021.eb deleted file mode 100644 index 505d652b65cdb50dd3e0231f2bdeba34759601a1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/ParMETIS-4.0.3-ipsmpi-2021.eb +++ /dev/null @@ -1,35 +0,0 @@ -name = 'ParMETIS' -version = '4.0.3' - -homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview' -description = """ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning -unstructured graphs, meshes, and for computing fill-reducing orderings of sparse matrices. ParMETIS extends the -functionality provided by METIS and includes routines that are especially suited for parallel AMR computations and large -scale numerical simulations. The algorithms implemented in ParMETIS are based on the parallel multilevel k-way -graph-partitioning, adaptive repartitioning, and parallel multi-constrained partitioning schemes. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de) ' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'usempi': True, 'pic': True, 'openmp': True} - -source_urls = ['http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - # Needed for elemental - 'parmetis_computevertexseparator.patch' -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -modextravars = { - 'PARMETIS_ROOT': '%(installdir)s', - 'PARMETIS_LIB': '%(installdir)s/lib', - 'PARMETIS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/p/ParMETIS/parmetis-4.0.3-double.patch b/Golden_Repo/p/ParMETIS/parmetis-4.0.3-double.patch deleted file mode 100644 index bf6f1c5a889fa3526873dd8a000bbffbda800cb8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/parmetis-4.0.3-double.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- parmetis-4.0.3/metis/include/metis.h.orig 2013-03-30 17:24:50.000000000 +0100 -+++ parmetis-4.0.3/metis/include/metis.h 2016-04-20 11:07:49.485844000 +0200 -@@ -40,7 +40,7 @@ - 32 : single precission floating point (float) - 64 : double precission floating point (double) - --------------------------------------------------------------------------*/ --#define REALTYPEWIDTH 32 -+#define REALTYPEWIDTH 64 - - - diff --git a/Golden_Repo/p/ParMETIS/parmetis_computevertexseparator.patch b/Golden_Repo/p/ParMETIS/parmetis_computevertexseparator.patch deleted file mode 100644 index adbc37c8510b4deeb44df2e2d1fd3652a257621a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParMETIS/parmetis_computevertexseparator.patch +++ /dev/null @@ -1,186 +0,0 @@ -diff -ruN parmetis-4.0.3.old/include/parmetis.h parmetis-4.0.3/include/parmetis.h ---- parmetis-4.0.3.old/include/parmetis.h 2017-04-05 17:20:11.888709466 +0200 -+++ parmetis-4.0.3/include/parmetis.h 2017-04-05 17:21:38.247478696 +0200 -@@ -113,6 +113,12 @@ - idx_t *vtxdist, idx_t *xadj, idx_t *adjncy, idx_t *numflag, - idx_t *options, idx_t *order, idx_t *sizes, MPI_Comm *comm); - -+void ParMETIS_ComputeVertexSeparator( -+ idx_t *vtxdist, idx_t *xadj, idx_t *adjncy, -+ idx_t *p_nseps, idx_t *s_nseps, -+ real_t *ubfrac, idx_t *idbglvl, idx_t *order, idx_t *sizes, -+ MPI_Comm *comm); -+ - #ifdef __cplusplus - } - #endif -diff -ruN parmetis-4.0.3.old/libparmetis/ComputeVertexSeparator.c parmetis-4.0.3/libparmetis/ComputeVertexSeparator.c ---- parmetis-4.0.3.old/libparmetis/ComputeVertexSeparator.c 1970-01-01 01:00:00.000000000 +0100 -+++ parmetis-4.0.3/libparmetis/ComputeVertexSeparator.c 2017-04-05 17:22:32.477589755 +0200 -@@ -0,0 +1,166 @@ -+/* -+ * Copyright 1997, Regents of the University of Minnesota -+ * Created by modifying ParMETIS routines by Jack Poulson, 2012-2015 -+ */ -+#include <parmetislib.h> -+ -+void ElParallelLabelVertices -+( ctrl_t *ctrl, graph_t *graph, idx_t *order, idx_t *sizes ) -+{ -+ idx_t i, j, nvtxs, id; -+ idx_t *where, *lpwgts, *gpwgts; -+ idx_t sizescan[3]; -+ -+ nvtxs = graph->nvtxs; -+ where = graph->where; -+ lpwgts = graph->lpwgts; -+ gpwgts = graph->gpwgts; -+ -+ /* Compute the local sizes of the left side, right side, and separator */ -+ iset(3, 0, lpwgts); -+ for (i=0; i<nvtxs; i++) -+ lpwgts[where[i]]++; -+ -+ /* Perform a Prefix scan of the separator size to determine the boundaries */ -+ gkMPI_Scan((void *)lpwgts, (void *)sizescan, 3, IDX_T, MPI_SUM, ctrl->comm); -+ gkMPI_Allreduce -+ ((void *)lpwgts, (void *)gpwgts, 3, IDX_T, MPI_SUM, ctrl->comm); -+ -+ /* Fill in the size of the partition */ -+ sizes[0] 
= gpwgts[0]; -+ sizes[1] = gpwgts[1]; -+ sizes[2] = gpwgts[2]; -+ -+ for( i=2; i>=0; --i ) -+ for( j=i+1; j<3; ++j ) -+ sizescan[i] += gpwgts[j]; -+ for( i=0; i<3; i++ ) -+ sizescan[i] -= lpwgts[i]; -+ -+ for( i=0; i<nvtxs; i++ ) -+ { -+ id = where[i]; -+ PASSERT(ctrl, id <= 2); -+ sizescan[id]++; -+ PASSERT(ctrl, order[i] == -1); -+ order[i] = graph->gnvtxs - sizescan[id]; -+ } -+} -+ -+void ElParallelOrder -+( ctrl_t *ctrl, graph_t *graph, idx_t *order, idx_t *sizes ) -+{ -+ idx_t i, nvtxs; -+ -+ nvtxs = graph->nvtxs; -+ iset(nvtxs, -1, order); -+ -+ /* graph->where = ismalloc(nvtxs, 0, "ElOrder: graph->where"); */ -+ /* If we computed an initial partition with Global_Partition, then we -+ should run the following instead of the above ismalloc of graph->where*/ -+ iset(nvtxs, 0, graph->where); -+ gk_free((void **)&graph->match, -+ (void **)&graph->cmap, -+ (void **)&graph->rlens, -+ (void **)&graph->slens, -+ (void **)&graph->rcand, LTERM); -+ -+ Order_Partition_Multiple(ctrl, graph); -+ -+ ElParallelLabelVertices(ctrl, graph, order, sizes); -+} -+ -+void ParMETIS_ComputeVertexSeparator -+( idx_t *vtxdist, idx_t *xadj, idx_t *adjncy, -+ idx_t *p_nseps, idx_t *s_nseps, -+ real_t *ubfrac, idx_t *idbglvl, idx_t *order, idx_t *sizes, -+ MPI_Comm *comm ) -+{ -+ idx_t i, j, npes, npesNonzero, mype, mypeNonzero, dbglvl, status, haveData; -+ ctrl_t *ctrl; -+ graph_t *graph; -+ MPI_Comm nonzeroComm, nullComm; -+ size_t curmem; -+ -+ gkMPI_Comm_size(*comm, &npes); -+ gkMPI_Comm_rank(*comm, &mype); -+ -+ if( vtxdist[npes] == 0 ) -+ { -+ sizes[0] = 0; -+ sizes[1] = 0; -+ sizes[2] = 0; -+ return; -+ } -+ -+ haveData = ( vtxdist[mype+1]-vtxdist[mype] != 0 ); -+ if( haveData ) -+ gkMPI_Comm_split(*comm, 1, mype, &nonzeroComm); -+ else -+ gkMPI_Comm_split(*comm, MPI_UNDEFINED, 0, &nullComm); -+ -+ if( !haveData ) -+ { -+ sizes[0] = sizes[1] = sizes[2] = 0; -+ gkMPI_Allreduce(MPI_IN_PLACE, (void *)sizes, 3, IDX_T, MPI_SUM, *comm); -+ return; -+ } -+ -+ gkMPI_Comm_size(nonzeroComm, &npesNonzero); -+ gkMPI_Comm_rank(nonzeroComm, &mypeNonzero); -+ -+ /* Compress the vtxdist data to make it match the new communicator */ -+ j=0; -+ for( i=1; i<npes+1; ++i ) -+ if( vtxdist[i] != vtxdist[j] ) -+ vtxdist[++j] = vtxdist[i]; -+ -+ status = METIS_OK; -+ gk_malloc_init(); -+ curmem = gk_GetCurMemoryUsed(); -+ -+ ctrl = SetupCtrl(PARMETIS_OP_KMETIS, NULL, 1, 2, NULL, NULL, nonzeroComm); -+ -+ dbglvl = (idbglvl == NULL ? 0 : *idbglvl); -+ ctrl->dbglvl = dbglvl; -+ -+ graph = SetupGraph(ctrl, 1, vtxdist, xadj, NULL, NULL, adjncy, NULL, 0); -+ AllocateWSpace(ctrl, 10*graph->nvtxs); -+ -+ /* Compute an initial partition: for some reason this improves the quality */ -+ ctrl->CoarsenTo = gk_min(vtxdist[npesNonzero]+1, -+ 200*gk_max(npesNonzero,ctrl->nparts)); -+ Global_Partition(ctrl, graph); -+ -+ /* Compute an ordering */ -+ ctrl->optype = PARMETIS_OP_OMETIS; -+ ctrl->partType = ORDER_PARTITION; -+ ctrl->mtype = PARMETIS_MTYPE_GLOBAL; -+ ctrl->rtype = PARMETIS_SRTYPE_2PHASE; -+ ctrl->p_nseps = (p_nseps == NULL ? 1 : *p_nseps); -+ ctrl->s_nseps = (s_nseps == NULL ? 1 : *s_nseps); -+ ctrl->ubfrac = (ubfrac == NULL ? 
ORDER_UNBALANCE_FRACTION : *ubfrac); -+ ctrl->dbglvl = dbglvl; -+ ctrl->ipart = ISEP_NODE; -+ ctrl->CoarsenTo = gk_min(graph->gnvtxs-1,1500*npesNonzero); -+ ElParallelOrder(ctrl, graph, order, sizes); -+ -+ FreeInitialGraphAndRemap(graph); -+ -+ /* Pass the data to the early-exiting processes with an allreduce */ -+ if( mypeNonzero != 0 ) -+ sizes[0] = sizes[1] = sizes[2] = 0; -+ gkMPI_Allreduce(MPI_IN_PLACE, (void*)sizes, 3, IDX_T, MPI_SUM, *comm); -+ -+ MPI_Comm_free( &nonzeroComm ); -+ -+ goto DONE; -+ -+DONE: -+ FreeCtrl(&ctrl); -+ if (gk_GetCurMemoryUsed() - curmem > 0) { -+ printf("ParMETIS appears to have a memory leak of %zdbytes. Report this.\n", -+ (ssize_t)(gk_GetCurMemoryUsed() - curmem)); -+ } -+ gk_malloc_cleanup(0); -+} diff --git a/Golden_Repo/p/ParaView/ParaView-5.8.1-EGL-gpsmkl-2020-Python-3.8.5.eb b/Golden_Repo/p/ParaView/ParaView-5.8.1-EGL-gpsmkl-2020-Python-3.8.5.eb deleted file mode 100644 index ba726fd4171c4d3041f3103f0cd66aa37a9fb3ed..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParaView/ParaView-5.8.1-EGL-gpsmkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,358 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ParaView' -version = '5.8.1' -versionsuffix = '-EGL-Python-%(pyver)s' - -homepage = "http://www.paraview.org" -description = """Paraview is a scientific parallel visualizer. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -local_dwnlsfx_src = 'download.php?submit=Download&version=v%(version_major_minor)s&type=source&os=Sources&downloadFile=' -local_dwnlsfx_dat = 'download.php?submit=Download&version=v%(version_major_minor)s&type=data&os=Sources&downloadFile=' -local_dwnlsfx_tdat = 'download.php?submit=Download&version=v%(version_major_minor)s&type=data&os=Sources&downloadFile=' - -source_urls = [('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_src), - ('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_dat)] -# ('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_tdat), - -sources = [("ParaView-v%(version)s.tar.gz"), - ("ParaViewData-v%(version)s.tar.gz")] -# ("ParaViewTestingData-v%(version)s.tar.gz"), - -checksums = [('sha256', '384241fc69bbdb412465dcb5ef88b88ee7ae0e92e7d7cde53be54b19296cf9fa'), - ('sha256', '9bf8f6acc4533520143ba2e19a04f6c34b1537c0b9c95d8e975623eafe3acea9')] - -patches = [ - ("python-5.8.1-simpleCatalystSupport_0.4.patch"), -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('git', '2.28.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Boost', '1.74.0', '', ('gpsmpi', '2020')), - ('X11', '20200222'), - ('bzip2', '1.0.8'), - ('HDF5', '1.10.6'), - ('FFmpeg', '4.3.1'), - ('Embree', '3.8.0'), - ('OSPRay', '1.8.4'), - ('libpng', '1.6.37'), - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('libxml2', '2.9.10'), - ('LibTIFF', '4.1.0'), - ('zlib', '1.2.11'), - ('netCDF', '4.7.4'), - ('netCDF-C++4', '4.3.1'), - ('netCDF-Fortran', '4.5.3'), - ('mpi4py', '3.0.3', '-Python-%(pyver)s'), - ('nlohmann-json', '3.9.1'), # for ParFlow plugin - # ('VTK', '8.2.0', '-Python-%(pyver)s', ('gcccoremkl', '8.3.0-2019.3.199')), - # ('VTKm','1.1.0','-AVX2'), - ('Qt5', '5.14.2'), - ('SciPy-Stack', '2020', '-Python-%(pyver)s', ('gcccoremkl', '9.3.0-2020.2.254')), - ('VirtualGL', '2.6.4'), - ('OpenGL', '2020'), -] - -separate_build_dir = True -# parallel = 24 - -# ensure we do not use a too advanced GL-version at config/build-time, which might not be available at run-time 
-preconfigopts = "export __EGL_VENDOR_LIBRARY_FILENAMES=${EBROOTOPENGL}/share/glvnd/egl_vendor.d/50_mesa.json && " -prebuildopts = "export __EGL_VENDOR_LIBRARY_FILENAMES=${EBROOTOPENGL}/share/glvnd/egl_vendor.d/50_mesa.json && " - -######################################################################################## -# check ParaView Superbuild options # -# https://gitlab.kitware.com/paraview/paraview-superbuild/tree/master # -# # -# check ParaView Build documenation # -# https://gitlab.kitware.com/paraview/paraview/blob/master/Documentation/dev/build.md # -######################################################################################## - -configopts = '-DCMAKE_POLICY_DEFAULT_CMP0074=OLD ' -configopts += '-DCMAKE_POLICY_DEFAULT_CMP0077=OLD ' - -# --- general settings --- # -configopts += '-DCMAKE_CXX_STANDARD=11 ' -configopts += '-DCXX_STANDARD_REQUIRED=ON ' -configopts += '-DCMAKE_CXX_EXTENSIONS=OFF ' # eg.-std=c++11 rather than -std=gnu++11 -configopts += '-DVTK_USE_CXX11_FEATURES=ON ' -# configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -configopts += '-DBUILD_TESTING=OFF ' # Without internet connection turn off testing -# Or consult https://gitlab.kitware.com/vtk/vtk/blob/master/Documentation/dev/git/data.md -# and download ExternalData to $EASYBUILD_SOURCEPATH and adjust -DExternalData_OBJECT_STORES accordingly -# configopts += '-DExternalData_OBJECT_STORES=%(builddir)s/ExternalData ' - -configopts += '-DBUILD_EXAMPLES=ON ' - -configopts += '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DZLIB_LIBRARY_DEBUG=$EBROOTZLIB/lib/libz.so ' - -# https://forum.openframeworks.cc/t/nvidia-drivers-pthreads-and-segfaults/2524 -configopts += '-DCMAKE_CXX_FLAGS="-lpthread $CMAKE_CXX_FLAGS" ' -configopts += '-DCMAKE_C_FLAGS="-lpthread $CMAKE_C_FLAGS" ' - -configopts += '-DPARAVIEW_BUILD_EDITION=CANONICAL ' -configopts += '-DPARAVIEW_BUILD_WITH_KITS=OFF ' -configopts += '-DPARAVIEW_USE_QT=OFF ' -configopts += '-DPARAVIEW_QT_VERSION=5 ' -configopts += '-DPARAVIEW_ENABLE_WEB=ON ' - -configopts += '-DPARAVIEW_SHARED_LIBS=ON ' -configopts += '-DPARAVIEW_USE_PYTHON=ON ' -configopts += '-DPARAVIEW_PYTHON_VERSION=3 ' -configopts += "-DPYTHON_EXECUTABLE=$EBROOTPYTHON/bin/python " - -configopts += '-DVTK_PYTHON_VERSION=3 ' -configopts += '-DVTK_NO_PYTHON_THREADS=OFF ' -configopts += '-DVTK_PYTHON_FULL_THREADSAFE=OFF ' # visibility depends on VTK_NO_PYTHON_THREADS=OFF -# If you pass VTK_PYTHON_FULL_THREADSAFE to true, then each and every call to python will be protected with GIL, -# ensuring that you can have eg. other python interpreter in your application and still use python wrapping in vtk. 
-# configopts += '-DUSE_EXTERNAL_VTK:BOOL=ON ' - -# --- parallel (on-node) --- # -# https://blog.kitware.com/simple-parallel-computing-with-vtksmptools-2/ -configopts += '-DVTK_SMP_IMPLEMENTATION_TYPE=OpenMP ' - -# --- parallel (distributed) --- # -configopts += '-DMPIEXEC_MAX_NUMPROCS=24 ' -configopts += '-DPARAVIEW_USE_MPI=ON ' - -# --- IO --- # -configopts += '-DXDMF_BUILD_MPI=ON ' -configopts += '-DPARAVIEW_ENABLE_XDMF3=ON ' - -# --- large data --- # -configopts += '-DVTK_USE_64BIT_IDS=ON ' -configopts += '-DVTK_USE_LARGE_DATA=ON ' - -# --- rendering --- # -configopts += '-DVTK_RENDERING_BACKEND:STRING=OpenGL2 ' - -# OpenGL (hardware) -# https://kitware.github.io/paraview-docs/latest/cxx/Offscreen.html -# If VTK_OPENGL_HAS_EGL or VTK_OPENGL_HAS_OSMESA is ON, the build supports headless rendering, -# otherwise VTK_USE_X must be ON and the build does not support headless, -# but can still support offscreen rendering. -# If VTK_USE_X is OFF, then either VTK_OPENGL_HAS_OSMESA or VTK_OPENGL_HAS_EGL must be ON. -# Then the build does not support onscreen rendering, but only headless rendering. -# If PARAVIEW_BUILD_QT_GUI is ON and VTK_USE_X is ON, while ParaView command line tools won't link against -# or use X calls, Qt will and hence an accessible X server is still needed to run the desktop client. -# If VTK_OPENGL_HAS_OSMESA is ON, and VTK_USE_X is ON, -# then all the OpenGL and OSMesa variables should point to the Mesa libraries. -# Likewise, if VTK_OPENGL_HAS_EGL is ON and VTK_USE_X is ON, then all the OpenGL and EGL variables -# should point to the system libraries providing both, typically the NVidia libraries. - -configopts += '-DOpenGL_GL_PREFERENCE=GLVND ' -configopts += '-DVTK_REPORT_OPENGL_ERRORS_IN_RELEASE_BUILDS=OFF ' - -configopts += "-DOPENGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_GLX_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_EGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -# configopts += "-DOPENGL_xmesa_INCLUDE_DIR=IGNORE " - -configopts += "-DOPENGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 " -configopts += "-DOPENGL_gl_LIBRARY=${EBROOTOPENGL}/lib/libGL.so " -configopts += "-DOPENGL_glx_LIBRARY=${EBROOTOPENGL}/lib/libGLX.so.0 " -configopts += "-DOPENGL_glu_LIBRARY=${EBROOTOPENGL}/lib/libGLU.so " -configopts += "-DOPENGL_egl_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 " - -# OpenGL over X -# configopts += '-DVTK_USE_X=ON ' # OFF:headless rendering -# already considered by Qt (https://gitlab.kitware.com/lorensen/vtk/commit/b29f6db3f746d84f830c81e4212e48db192e4dbb) -# configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=OFF ' -# configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D - -# EGL (off-screen rendering with OpenGL, but without the need for X) -# call pvserver with –egl-device-index=0 or 1 and –disable-xdisplay-test -configopts += '-DVTK_OPENGL_HAS_EGL=ON ' -configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -configopts += '-DVTK_USE_X=OFF ' -configopts += '-DVTK_DEFAULT_EGL_DEVICE_INDEX=0 ' -# configopts += '-DEGL_INCLUDE_DIR=${EBROOTOPENGL}/include/EGL/ ' # https://www.khronos.org/registry/EGL/ -# configopts += '-DEGL_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 ' -# configopts += '-DEGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 ' -# configopts += '-DEGL_gldispatch_LIBRARY=${EBROOTOPENGL}/lib/libGLdispatch.so.0 ' # <path_to_libGLdispatch.so.0> - -# OSMesa (software) -# With OSMesa the DISPLAY variable has no meaning and is not 
needed -# When ON, implies that ParaView can use OSMesa to support headless modes of operation. -# configopts += '-DVTK_OPENGL_HAS_OSMESA=ON ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -# configopts += '-DVTK_USE_X=OFF ' -# configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=ON ' -# configopts += '-DOSMESA_INCLUDE_DIR=${EBROOTOPENGL}/include ' -# configopts += '-DOSMESA_LIBRARY=${EBROOTOPENGL}/lib/libOSMesa.so ' - -# Raytracing -configopts += '-DPARAVIEW_ENABLE_RAYTRACING=ON ' -configopts += '-DVTK_ENABLE_OSPRAY=ON ' -configopts += '-DVTK_ENABLE_VISRTX=OFF ' - -configopts += '-DPARAVIEW_USE_OSPRAY=ON ' -configopts += "-Dospray_DIR=${EBROOTOSPRAY} " -configopts += "-Dembree_DIR=${EBROOTEMBREE}/lib64/cmake/embree-3.8.0 " -configopts += '-DModule_vtkRenderingOSPRay=ON ' -configopts += '-DVTKOSPRAY_ENABLE_DENOISER=OFF ' - -# --- extra libs --- # -# configopts += "-DVTKm_DIR=$EBROOTVTKM/lib/cmake/vtkm-1.1/ " -configopts += '-DPARAVIEW_USE_VTKM=ON ' -configopts += '-DModule_vtkAcceleratorsVTKm=ON ' -configopts += '-DVTKm_Vectorization=AVX2 ' -configopts += '-DVTKm_ENABLE_OPENMP=ON ' -# configopts += '-DVTKm_ENABLE_MPI=ON ' -# configopts += '-DVTKm_ENABLE_LOGGING=ON ' -# configopts += '-DVTKm_ENABLE_CUDA=ON ' - -configopts += "-DFFMPEG_ROOT=$EBROOTFFMPEG " -configopts += '-DPARAVIEW_ENABLE_FFMPEG=ON ' -configopts += '-DModule_vtkIOFFMPEG=ON ' -configopts += '-DModule_vtkIOVideo=ON ' - -configopts += '-DModule_SignedTensor=ON ' - -configopts += '-DModule_vtkDICOM=ON ' - -# configopts += '-DModule_vtkFiltersMatlab=OFF ' -configopts += '-DModule_vtkFiltersReebGraph=ON ' -configopts += '-DModule_vtkFiltersSMP=ON ' -configopts += '-DModule_vtkFiltersSelection=ON ' -# configopts += '-DModule_vtkFiltersStatisticsGnu=OFF ' -configopts += '-DModule_vtkFiltersTopology=ON ' - -# --- coupling --- # -# configopts += '-DPARAVIEW_ENABLE_CATALYST=ON ' # variable is obsolete and no longer has any effect - -# --- development & testing --- # -configopts += '-DPARAVIEW_INSTALL_DEVELOPMENT_FILES=ON ' -configopts += '-DPARAVIEW_BUILD_DEVELOPER_DOCUMENTATION=OFF ' -configopts += '-DPARAVIEW_BUILD_EXAMPLES=OFF ' -configopts += '-DPARAVIEW_BUILD_TESTING=OFF ' -configopts += '-DPARAVIEW_BUILD_VTK_TESTING=OFF ' -configopts += '-DCTEST_TEST_TIMEOUT=10800 ' -configopts += '-DExternalData_TIMEOUT_INACTIVITY=0 ' - -# --- XDMF options --- # -configopts += '-DXDMF_USE_BZIP2=ON ' -configopts += '-DXDMF_USE_GZIP=ON ' - -# --- VTK external libraries --- # -configopts += '-DVTK_USE_SYSTEM_EXPAT=ON ' -configopts += '-DVTK_USE_SYSTEM_FREETYPE=ON ' -configopts += '-DVTK_USE_SYSTEM_HDF5=ON ' -configopts += '-DVTK_USE_SYSTEM_JPEG=ON ' -configopts += '-DVTK_USE_SYSTEM_LIBXML2=ON ' -configopts += '-DVTK_USE_SYSTEM_MPI4PY=ON ' -configopts += '-DVTK_USE_SYSTEM_NETCDF=ON ' -configopts += '-DVTK_USE_SYSTEM_PNG=ON ' -configopts += '-DVTK_USE_SYSTEM_PYGMENTS=ON ' -configopts += '-DVTK_USE_SYSTEM_SIX=ON ' -configopts += '-DVTK_USE_SYSTEM_TIFF=ON ' -configopts += '-DVTK_USE_SYSTEM_ZLIB=ON ' -configopts += '-DNETCDF_CXX_ROOT=$EBROOTNETCDFMINCPLUSPLUS ' -configopts += '-DNETCDF_F77_ROOT=$EBROOTNETCDFMINFORTRAN ' -configopts += '-DNETCDF_F90_ROOT=$EBROOTNETCDFMINFORTRAN ' - -# --- ParaView Extra-Reader --- # -configopts += '-DPARAVIEW_PLUGIN_ENABLE_ParFlow=ON ' - -configopts += '-DPARAVIEW_ENABLE_VISITBRIDGE=ON ' -configopts += '-DVISIT_BUILD_READER_Nek5000=ON ' -# configopts += '-DVISIT_BUILD_READER_Boxlib3D=ON ' # req. external dependency -# configopts += '-DVISIT_BUILD_READER_Mili=ON ' # req. 
external dependency -# configopts += '-DVISIT_BUILD_READER_Silo=ON ' # req. external dependency - -# --- ParaView Plugin Autoload --- # -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_AnalyzeNIfTIIO=ON ' -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_SurfaceLIC=ON ' # can result in error -# ( 489.245s) [paraview ]vtkOpenGLVertexArrayObj:293 ERR| vtkOpenGLVertexArrayObject (0x3cd7bc0) -# ( 489.245s) [paraview ]vtkOpenGLRenderUtilitie:78 WARN| Error setting 'vertexMC' in shader VAO. -# https://www.paraview.org/pipermail/paraview/2016-March/036518.html - -# --- infos -# -- Plugin: AcceleratedAlgorithms - Testing plugin : Enabled -# -- Plugin: AdiosReader - Reader for *.bp files based on Adios : Disabled -# -- Plugin: AdiosStagingReader - Performs staging reads from simulations using ADIOS : Disabled -# -- Plugin: AnalyzeNIfTIIO - Reader/Writer for Analyze and NifTI files : Enabled -# -- Plugin: ArrowGlyph - Glyph with customizable Arrows : Enabled -# -- Plugin: CDIReader - ICON netCDF/CDI Reader : Enabled -# -- Plugin: DigitalRockPhysics - Digital Rock Physics analysis filters : Enabled -# -- Plugin: EmbossingRepresentations - Embossing representations : Enabled -# -- Plugin: EyeDomeLighting - Add 3D View with eye-dome Lighting support : Enabled -# -- Plugin: GMVReader - Reader for binary or ASCII files stored in General Mesh Viewer file format : Enabled -# -- Plugin: GenericIOReader - GenericIO Reader for HACC data : Enabled -# -- Plugin: GeodesicMeasurement - Geodesic Measurement : Enabled -# -- Plugin: GmshReader - Reader for visualization of high-order polynomial solutions under the Gmsh format : off -# -- Plugin: InSituExodus - Experimental memory-conserving Exodus II file reader. : Disabled -# -- Plugin: LagrangianParticleTracker - Highly customizable filter for particle tracking : Enabled -# -- Plugin: MooseXfemClip - Clip partial elements generated by MOOSE XFEM : Enabled -# -- Plugin: Moments - Filters for Flux and Circulation Fields : Enabled -# -- Plugin: NetCDFTimeAnnotationPlugin - Provides NetCDF Time Annotation filter : Enabled -# netcdftime Python module not found! NetCDFTimeAnnotationPlugin not be available until it is installed. 
-# -- Plugin 'NetCDFTimeAnnotationPlugin' lists plugin library named 'NetCDFTimeAnnotationPlugin' -# -- Plugin: NonOrthogonalSource - Non Orthogonal Source code sample : Enabled -# -- Plugin: OpenVR - OpenVR Support : Disabled -# -- Plugin: PythonQtPlugin - PythonQt Plugin : Disabled -# -- Plugin: SLACTools - SLAC Tools : Enabled -# -- Plugin: SierraPlotTools - Sierra Plotting Tools : Enabled -# -- Plugin: StreamLinesRepresentation - Add animated Stream Lines representation for any type of dataset : Enabled -# -- Plugin: StreamingParticles - Render Particles with Streaming : Enabled -# -- Plugin: SurfaceLIC - Add Surface-LIC vector visualization support : Enabled -# -- Plugin: TemporalParallelismScriptGenerator - Plugin for creating Python spatio-temporal processing scripts : off -# -- Plugin: PacMan - Testing plugin : Enabled -# -- Plugin: ThickenLayeredCells - Filter to thicken layered cells : Enabled -# -- Plugin: VRPlugin - Virtual Reality Devices and Interactor styles : Disabled -# -- Plugin: VTKmFilters - VTKm many-core filters : Enabled -# -- Plugin: VaporPlugin - Plugin to read NCAR VDR files : Disabled -# -- Plugin: pvNVIDIAIndeX - Plugin for NVIDIA IndeX : Enabled - -# ######################################################### -# The ParaView server can be cranky, test downloads are quite often failing -# Using ; insted of && gives a second chance to download the test files, if the first serial attempt would fail. -# prebuildopts = 'make VTKData ;' # only if: configopts += '-DBUILD_TESTING=ON' - -postinstallcmds = ['python -m compileall %(installdir)s/lib64/python3.6/site-packages/'] -# 'cp -a %(builddir)s/ParaView-v%(version)s/ %(installdir)s/src', # copy source from build dir to install dir -# '', # move debug info to separate files: -# http://stackoverflow.com/questions/866721/how-to-generate-gcc-debug-symbol-outside-the-build-target -# '', # debugedit -i --base-dir=%(builddir)s/ParaView-v%(version)s --dest-dir= %(installdir)s/src <file.debug> -# # change path to source in debug info - -modextravars = {'CUDA_VISIBLE_DEVICES': '0,1'} - -# OpenSWR fully supports OpenGL 3.0 and most of 3.3, but ParaView requires 3.3 -> clame to fully support 3.3 -modextravars = {'MESA_GL_VERSION_OVERRIDE': '3.3'} -modextravars = {'MESA_GLSL_VERSION_OVERRIDE': '330'} - -modextravars = { - # OpenMP will choose an optimum number of threads by default, which is usually the number of cores - # 'OMP_NUM_THREADS': '28', # fix number of threads used by paraview filters and parallel sections in the code - # threads used by ospray - details https://github.com/ospray/ospray/blob/release-2.0.x/ospray/api/Device.cpp#L88 - # unset => OSPRAY uses all hardware threads - # 'OSPRAY_THREADS': '14', # OSPRay < 2.0 - # 'OSPRAY_NUM_THREADS': '14', # OSPRay >= 2.0 - # When TBB is used for OSPRAY: tbb::task_scheduler_init::default_num_threads() is default if no OSPRAY_NUM_THREADS - # https://github.com/ospray/ospcommon/blob/master/ospcommon/tasking/detail/tasking_system_init.cpp#L47 - # https://www.threadingbuildingblocks.org/docs/doxygen/a00150.html - # more ospray definitions: https://www.ospray.org/documentation.html#environment-variables - 'KNOB_MAX_WORKER_THREADS': '65535', # max. 
threads used by OpenSWR (limited by number of hardware threads) - # details in https://gitlab.version.fz-juelich.de/vis/vis-software/issues/14 - # more knob defs: https://github.com/mesa3d/mesa/blob/master/src/gallium/docs/source/drivers/openswr/knobs.rst -} - -modextrapaths = {'PYTHONPATH': 'lib64/python%(pyshortver)s/site-packages'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/ParaView/ParaView-5.8.1-gpsmkl-2020-Python-3.8.5.eb b/Golden_Repo/p/ParaView/ParaView-5.8.1-gpsmkl-2020-Python-3.8.5.eb deleted file mode 100644 index ee76609fb6a4922bf545a5327d72f0b8244e8abc..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParaView/ParaView-5.8.1-gpsmkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,355 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ParaView' -version = '5.8.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = "http://www.paraview.org" -description = """Paraview is a scientific parallel visualizer. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -local_dwnlsfx_src = 'download.php?submit=Download&version=v%(version_major_minor)s&type=source&os=Sources&downloadFile=' -local_dwnlsfx_dat = 'download.php?submit=Download&version=v%(version_major_minor)s&type=data&os=Sources&downloadFile=' -local_dwnlsfx_tdat = 'download.php?submit=Download&version=v%(version_major_minor)s&type=data&os=Sources&downloadFile=' - -source_urls = [('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_src), - ('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_dat)] -# ('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_tdat), - -sources = [("ParaView-v%(version)s.tar.gz"), - ("ParaViewData-v%(version)s.tar.gz")] -# ("ParaViewTestingData-v%(version)s.tar.gz"), - -checksums = [('sha256', '384241fc69bbdb412465dcb5ef88b88ee7ae0e92e7d7cde53be54b19296cf9fa'), - ('sha256', '9bf8f6acc4533520143ba2e19a04f6c34b1537c0b9c95d8e975623eafe3acea9')] - -patches = [ - ("python-5.8.1-simpleCatalystSupport_0.4.patch"), -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('git', '2.28.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Boost', '1.74.0', '', ('gpsmpi', '2020')), - ('X11', '20200222'), - ('bzip2', '1.0.8'), - ('HDF5', '1.10.6'), - ('FFmpeg', '4.3.1'), - ('Embree', '3.8.0'), - ('OSPRay', '1.8.4'), - ('libpng', '1.6.37'), - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('libxml2', '2.9.10'), - ('LibTIFF', '4.1.0'), - ('zlib', '1.2.11'), - ('netCDF', '4.7.4'), - ('netCDF-C++4', '4.3.1'), - ('netCDF-Fortran', '4.5.3'), - ('mpi4py', '3.0.3', '-Python-%(pyver)s'), - ('nlohmann-json', '3.9.1'), # for ParFlow plugin - # ('VTK', '8.2.0', '-Python-%(pyver)s', ('gcccoremkl', '8.3.0-2019.3.199')), - # ('VTKm','1.1.0','-AVX2'), - ('Qt5', '5.14.2'), - ('SciPy-Stack', '2020', '-Python-%(pyver)s', ('gcccoremkl', '9.3.0-2020.2.254')), - ('VirtualGL', '2.6.4'), - ('OpenGL', '2020'), -] - -separate_build_dir = True -# parallel = 24 - -######################################################################################## -# check ParaView Superbuild options # -# https://gitlab.kitware.com/paraview/paraview-superbuild/tree/master # -# # -# check ParaView Build documenation # -# https://gitlab.kitware.com/paraview/paraview/blob/master/Documentation/dev/build.md # -######################################################################################## - -configopts = '-DCMAKE_POLICY_DEFAULT_CMP0074=OLD ' -configopts += 
'-DCMAKE_POLICY_DEFAULT_CMP0077=OLD ' - -# --- general settings --- # -configopts += '-DCMAKE_CXX_STANDARD=11 ' -configopts += '-DCXX_STANDARD_REQUIRED=ON ' -configopts += '-DCMAKE_CXX_EXTENSIONS=OFF ' # eg.-std=c++11 rather than -std=gnu++11 -configopts += '-DVTK_USE_CXX11_FEATURES=ON ' -# configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -configopts += '-DBUILD_TESTING=OFF ' # Without internet connection turn off testing -# Or consult https://gitlab.kitware.com/vtk/vtk/blob/master/Documentation/dev/git/data.md -# and download ExternalData to $EASYBUILD_SOURCEPATH and adjust -DExternalData_OBJECT_STORES accordingly -# configopts += '-DExternalData_OBJECT_STORES=%(builddir)s/ExternalData ' - -configopts += '-DBUILD_EXAMPLES=ON ' - -configopts += '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DZLIB_LIBRARY_DEBUG=$EBROOTZLIB/lib/libz.so ' - -# https://forum.openframeworks.cc/t/nvidia-drivers-pthreads-and-segfaults/2524 -configopts += '-DCMAKE_CXX_FLAGS="-lpthread $CMAKE_CXX_FLAGS" ' -configopts += '-DCMAKE_C_FLAGS="-lpthread $CMAKE_C_FLAGS" ' - -configopts += '-DPARAVIEW_BUILD_EDITION=CANONICAL ' -configopts += '-DPARAVIEW_BUILD_WITH_KITS=OFF ' -configopts += '-DPARAVIEW_USE_QT=ON ' -configopts += '-DPARAVIEW_QT_VERSION=5 ' -configopts += '-DPARAVIEW_ENABLE_WEB=ON ' - -configopts += '-DPARAVIEW_SHARED_LIBS=ON ' -configopts += '-DPARAVIEW_USE_PYTHON=ON ' -configopts += '-DPARAVIEW_PYTHON_VERSION=3 ' -configopts += "-DPYTHON_EXECUTABLE=$EBROOTPYTHON/bin/python " - -configopts += '-DVTK_PYTHON_VERSION=3 ' -configopts += '-DVTK_NO_PYTHON_THREADS=OFF ' -configopts += '-DVTK_PYTHON_FULL_THREADSAFE=OFF ' # visibility depends on VTK_NO_PYTHON_THREADS=OFF -# If you pass VTK_PYTHON_FULL_THREADSAFE to true, then each and every call to python will be protected with GIL, -# ensuring that you can have eg. other python interpreter in your application and still use python wrapping in vtk. -# configopts += '-DUSE_EXTERNAL_VTK:BOOL=ON ' - -# --- parallel (on-node) --- # -# https://blog.kitware.com/simple-parallel-computing-with-vtksmptools-2/ -configopts += '-DVTK_SMP_IMPLEMENTATION_TYPE=OpenMP ' - -# --- parallel (distributed) --- # -configopts += '-DMPIEXEC_MAX_NUMPROCS=24 ' -configopts += '-DPARAVIEW_USE_MPI=ON ' - -# --- IO --- # -configopts += '-DXDMF_BUILD_MPI=ON ' -configopts += '-DPARAVIEW_ENABLE_XDMF3=ON ' - -# --- large data --- # -configopts += '-DVTK_USE_64BIT_IDS=ON ' -configopts += '-DVTK_USE_LARGE_DATA=ON ' - -# --- rendering --- # -configopts += '-DVTK_RENDERING_BACKEND:STRING=OpenGL2 ' - -# OpenGL (hardware) -# https://kitware.github.io/paraview-docs/latest/cxx/Offscreen.html -# If VTK_OPENGL_HAS_EGL or VTK_OPENGL_HAS_OSMESA is ON, the build supports headless rendering, -# otherwise VTK_USE_X must be ON and the build does not support headless, -# but can still support offscreen rendering. -# If VTK_USE_X is OFF, then either VTK_OPENGL_HAS_OSMESA or VTK_OPENGL_HAS_EGL must be ON. -# Then the build does not support onscreen rendering, but only headless rendering. -# If PARAVIEW_BUILD_QT_GUI is ON and VTK_USE_X is ON, while ParaView command line tools won't link against -# or use X calls, Qt will and hence an accessible X server is still needed to run the desktop client. -# If VTK_OPENGL_HAS_OSMESA is ON, and VTK_USE_X is ON, -# then all the OpenGL and OSMesa variables should point to the Mesa libraries. -# Likewise, if VTK_OPENGL_HAS_EGL is ON and VTK_USE_X is ON, then all the OpenGL and EGL variables -# should point to the system libraries providing both, typically the NVidia libraries. 
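
The comment block above boils down to three mutually exclusive rendering set-ups: X/onscreen, EGL headless, and OSMesa headless. A minimal sketch of that decision matrix follows, using the same CMake cache variables referenced above; the helper itself is hypothetical and not part of this easyconfig, which picks the X-based combination a few lines below (leaving VTK_OPENGL_HAS_EGL at its default).

# Minimal sketch of the rendering-mode matrix described above (hypothetical helper, for reference only).
_RENDERING_MODES = {
    # mode       VTK_USE_X, VTK_OPENGL_HAS_EGL, VTK_OPENGL_HAS_OSMESA
    'x11':      ('ON', 'OFF', 'OFF'),   # onscreen + offscreen, requires a reachable X server
    'egl':      ('OFF', 'ON', 'OFF'),   # headless, OpenGL/EGL provided by the NVIDIA libraries
    'osmesa':   ('OFF', 'OFF', 'ON'),   # headless, software rendering via Mesa/OSMesa
}

def rendering_configopts(mode):
    """Return the CMake flags for one of the rendering modes listed above."""
    use_x, has_egl, has_osmesa = _RENDERING_MODES[mode]
    return ('-DVTK_USE_X=%s -DVTK_OPENGL_HAS_EGL=%s -DVTK_OPENGL_HAS_OSMESA=%s '
            % (use_x, has_egl, has_osmesa))
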
- -configopts += '-DOpenGL_GL_PREFERENCE=GLVND ' -configopts += '-DVTK_REPORT_OPENGL_ERRORS_IN_RELEASE_BUILDS=OFF ' - -configopts += "-DOPENGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_GLX_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_EGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -# configopts += "-DOPENGL_xmesa_INCLUDE_DIR=IGNORE " - -configopts += "-DOPENGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 " -configopts += "-DOPENGL_gl_LIBRARY=${EBROOTOPENGL}/lib/libGL.so " -configopts += "-DOPENGL_glx_LIBRARY=${EBROOTOPENGL}/lib/libGLX.so.0 " -configopts += "-DOPENGL_glu_LIBRARY=${EBROOTOPENGL}/lib/libGLU.so " -configopts += "-DOPENGL_egl_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 " - -# OpenGL over X -configopts += '-DVTK_USE_X=ON ' # OFF:headless rendering -# already considered by Qt (https://gitlab.kitware.com/lorensen/vtk/commit/b29f6db3f746d84f830c81e4212e48db192e4dbb) -configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=OFF ' -configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D - -# EGL (off-screen rendering with OpenGL, but without the need for X) -# call pvserver with –egl-device-index=0 or 1 and –disable-xdisplay-test -# configopts += '-DVTK_OPENGL_HAS_EGL=ON ' -# configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -# configopts += '-DVTK_USE_X=OFF ' -# configopts += '-DVTK_DEFAULT_EGL_DEVICE_INDEX=0 ' -# #configopts += '-DEGL_INCLUDE_DIR=${EBROOTOPENGL}/include/EGL/ ' # https://www.khronos.org/registry/EGL/ -# #configopts += '-DEGL_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 ' -# #configopts += '-DEGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 ' -# #configopts += '-DEGL_gldispatch_LIBRARY=${EBROOTOPENGL}/lib/libGLdispatch.so.0 ' # <path_to_libGLdispatch.so.0> - -# OSMesa (software) -# With OSMesa the DISPLAY variable has no meaning and is not needed -# When ON, implies that ParaView can use OSMesa to support headless modes of operation. 
-# configopts += '-DVTK_OPENGL_HAS_OSMESA=ON ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -# configopts += '-DVTK_USE_X=OFF ' -# configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=ON ' -# configopts += '-DOSMESA_INCLUDE_DIR=${EBROOTOPENGL}/include ' -# configopts += '-DOSMESA_LIBRARY=${EBROOTOPENGL}/lib/libOSMesa.so ' - -# Raytracing -configopts += '-DPARAVIEW_ENABLE_RAYTRACING=ON ' -configopts += '-DVTK_ENABLE_OSPRAY=ON ' -configopts += '-DVTK_ENABLE_VISRTX=OFF ' - -configopts += '-DPARAVIEW_USE_OSPRAY=ON ' -configopts += "-Dospray_DIR=${EBROOTOSPRAY} " -configopts += "-Dembree_DIR=${EBROOTEMBREE}/lib64/cmake/embree-3.8.0 " -configopts += '-DModule_vtkRenderingOSPRay=ON ' -configopts += '-DVTKOSPRAY_ENABLE_DENOISER=OFF ' - -# --- extra libs --- # -# configopts += "-DVTKm_DIR=$EBROOTVTKM/lib/cmake/vtkm-1.1/ " -configopts += '-DPARAVIEW_USE_VTKM=ON ' -configopts += '-DModule_vtkAcceleratorsVTKm=ON ' -configopts += '-DVTKm_Vectorization=AVX2 ' -configopts += '-DVTKm_ENABLE_OPENMP=ON ' -# configopts += '-DVTKm_ENABLE_MPI=ON ' -# configopts += '-DVTKm_ENABLE_LOGGING=ON ' -# configopts += '-DVTKm_ENABLE_CUDA=ON ' - -configopts += "-DFFMPEG_ROOT=$EBROOTFFMPEG " -configopts += '-DPARAVIEW_ENABLE_FFMPEG=ON ' -configopts += '-DModule_vtkIOFFMPEG=ON ' -configopts += '-DModule_vtkIOVideo=ON ' - -configopts += '-DModule_SignedTensor=ON ' - -configopts += '-DModule_vtkDICOM=ON ' - -# configopts += '-DModule_vtkFiltersMatlab=OFF ' -configopts += '-DModule_vtkFiltersReebGraph=ON ' -configopts += '-DModule_vtkFiltersSMP=ON ' -configopts += '-DModule_vtkFiltersSelection=ON ' -# configopts += '-DModule_vtkFiltersStatisticsGnu=OFF ' -configopts += '-DModule_vtkFiltersTopology=ON ' - -# --- coupling --- # -# configopts += '-DPARAVIEW_ENABLE_CATALYST=ON ' # variable is obsolete and no longer has any effect - -# --- development & testing --- # -configopts += '-DPARAVIEW_INSTALL_DEVELOPMENT_FILES=ON ' -configopts += '-DPARAVIEW_BUILD_DEVELOPER_DOCUMENTATION=OFF ' -configopts += '-DPARAVIEW_BUILD_EXAMPLES=OFF ' -configopts += '-DPARAVIEW_BUILD_TESTING=OFF ' -configopts += '-DPARAVIEW_BUILD_VTK_TESTING=OFF ' -configopts += '-DCTEST_TEST_TIMEOUT=10800 ' -configopts += '-DExternalData_TIMEOUT_INACTIVITY=0 ' - -# --- XDMF options --- # -configopts += '-DXDMF_USE_BZIP2=ON ' -configopts += '-DXDMF_USE_GZIP=ON ' - -# --- VTK external libraries --- # -configopts += '-DVTK_USE_SYSTEM_EXPAT=ON ' -configopts += '-DVTK_USE_SYSTEM_FREETYPE=ON ' -configopts += '-DVTK_USE_SYSTEM_HDF5=ON ' -configopts += '-DVTK_USE_SYSTEM_JPEG=ON ' -configopts += '-DVTK_USE_SYSTEM_LIBXML2=ON ' -configopts += '-DVTK_USE_SYSTEM_MPI4PY=ON ' -configopts += '-DVTK_USE_SYSTEM_NETCDF=ON ' -configopts += '-DVTK_USE_SYSTEM_PNG=ON ' -configopts += '-DVTK_USE_SYSTEM_PYGMENTS=ON ' -configopts += '-DVTK_USE_SYSTEM_SIX=ON ' -configopts += '-DVTK_USE_SYSTEM_TIFF=ON ' -configopts += '-DVTK_USE_SYSTEM_ZLIB=ON ' -configopts += '-DNETCDF_CXX_ROOT=$EBROOTNETCDFMINCPLUSPLUS ' -configopts += '-DNETCDF_F77_ROOT=$EBROOTNETCDFMINFORTRAN ' -configopts += '-DNETCDF_F90_ROOT=$EBROOTNETCDFMINFORTRAN ' - -# --- ParaView Extra-Reader --- # -configopts += '-DPARAVIEW_PLUGIN_ENABLE_ParFlow=ON ' - -# https://gitlab.kitware.com/paraview/visitbridge/-/blob/master/databases/CMakeLists.txt -configopts += '-DPARAVIEW_ENABLE_VISITBRIDGE=ON ' -configopts += '-DVISIT_BUILD_READER_Nek5000=ON ' -# configopts += '-DVISIT_BUILD_READER_Boxlib3D=ON ' # req. external dependency -# configopts += '-DVISIT_BUILD_READER_Mili=ON ' # req. 
external dependency -# configopts += '-DVISIT_BUILD_READER_Silo=ON ' # req. external dependency - -# --- ParaView Plugin Autoload --- # -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_AnalyzeNIfTIIO=ON ' -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_SurfaceLIC=ON ' # can result in error -# ( 489.245s) [paraview ]vtkOpenGLVertexArrayObj:293 ERR| vtkOpenGLVertexArrayObject (0x3cd7bc0) -# ( 489.245s) [paraview ]vtkOpenGLRenderUtilitie:78 WARN| Error setting 'vertexMC' in shader VAO. -# https://www.paraview.org/pipermail/paraview/2016-March/036518.html - -# --- infos -# -- Plugin: AcceleratedAlgorithms - Testing plugin : Enabled -# -- Plugin: AdiosReader - Reader for *.bp files based on Adios : Disabled -# -- Plugin: AdiosStagingReader - Performs staging reads from simulations using ADIOS : Disabled -# -- Plugin: AnalyzeNIfTIIO - Reader/Writer for Analyze and NifTI files : Enabled -# -- Plugin: ArrowGlyph - Glyph with customizable Arrows : Enabled -# -- Plugin: CDIReader - ICON netCDF/CDI Reader : Enabled -# -- Plugin: DigitalRockPhysics - Digital Rock Physics analysis filters : Enabled -# -- Plugin: EmbossingRepresentations - Embossing representations : Enabled -# -- Plugin: EyeDomeLighting - Add 3D View with eye-dome Lighting support : Enabled -# -- Plugin: GMVReader - Reader for binary or ASCII files stored in General Mesh Viewer file format : Enabled -# -- Plugin: GenericIOReader - GenericIO Reader for HACC data : Enabled -# -- Plugin: GeodesicMeasurement - Geodesic Measurement : Enabled -# -- Plugin: GmshReader - Reader for visualization of high-order polynomial solutions under the Gmsh format : off -# -- Plugin: InSituExodus - Experimental memory-conserving Exodus II file reader. : Disabled -# -- Plugin: LagrangianParticleTracker - Highly customizable filter for particle tracking : Enabled -# -- Plugin: MooseXfemClip - Clip partial elements generated by MOOSE XFEM : Enabled -# -- Plugin: Moments - Filters for Flux and Circulation Fields : Enabled -# -- Plugin: NetCDFTimeAnnotationPlugin - Provides NetCDF Time Annotation filter : Enabled -# netcdftime Python module not found! NetCDFTimeAnnotationPlugin not be available until it is installed. 
-# -- Plugin 'NetCDFTimeAnnotationPlugin' lists plugin library named 'NetCDFTimeAnnotationPlugin' -# -- Plugin: NonOrthogonalSource - Non Orthogonal Source code sample : Enabled -# -- Plugin: OpenVR - OpenVR Support : Disabled -# -- Plugin: PythonQtPlugin - PythonQt Plugin : Disabled -# -- Plugin: SLACTools - SLAC Tools : Enabled -# -- Plugin: SierraPlotTools - Sierra Plotting Tools : Enabled -# -- Plugin: StreamLinesRepresentation - Add animated Stream Lines representation for any type of dataset : Enabled -# -- Plugin: StreamingParticles - Render Particles with Streaming : Enabled -# -- Plugin: SurfaceLIC - Add Surface-LIC vector visualization support : Enabled -# -- Plugin: TemporalParallelismScriptGenerator - Plugin for creating Python spatio-temporal processing scripts : off -# -- Plugin: PacMan - Testing plugin : Enabled -# -- Plugin: ThickenLayeredCells - Filter to thicken layered cells : Enabled -# -- Plugin: VRPlugin - Virtual Reality Devices and Interactor styles : Disabled -# -- Plugin: VTKmFilters - VTKm many-core filters : Enabled -# -- Plugin: VaporPlugin - Plugin to read NCAR VDR files : Disabled -# -- Plugin: pvNVIDIAIndeX - Plugin for NVIDIA IndeX : Enabled - -# ######################################################### -# The ParaView server can be cranky, test downloads are quite often failing -# Using ; insted of && gives a second chance to download the test files, if the first serial attempt would fail. -# prebuildopts = 'make VTKData ;' # only if: configopts += '-DBUILD_TESTING=ON' - -postinstallcmds = ['python -m compileall %(installdir)s/lib64/python3.6/site-packages/'] -# 'cp -a %(builddir)s/ParaView-v%(version)s/ %(installdir)s/src', # copy source from build dir to install dir -# '', # move debug info to separate files: -# http://stackoverflow.com/questions/866721/how-to-generate-gcc-debug-symbol-outside-the-build-target -# '', # debugedit -i --base-dir=%(builddir)s/ParaView-v%(version)s --dest-dir= %(installdir)s/src <file.debug> -# # change path to source in debug info - -modextravars = {'CUDA_VISIBLE_DEVICES': '0,1'} - -# OpenSWR fully supports OpenGL 3.0 and most of 3.3, but ParaView requires 3.3 -> clame to fully support 3.3 -modextravars = {'MESA_GL_VERSION_OVERRIDE': '3.3'} -modextravars = {'MESA_GLSL_VERSION_OVERRIDE': '330'} - -modextravars = { - # OpenMP will choose an optimum number of threads by default, which is usually the number of cores - # 'OMP_NUM_THREADS': '28', # fix number of threads used by paraview filters and parallel sections in the code - # threads used by ospray - details https://github.com/ospray/ospray/blob/release-2.0.x/ospray/api/Device.cpp#L88 - # unset => OSPRAY uses all hardware threads - # 'OSPRAY_THREADS': '14', # OSPRay < 2.0 - # 'OSPRAY_NUM_THREADS': '14', # OSPRay >= 2.0 - # When TBB is used for OSPRAY: tbb::task_scheduler_init::default_num_threads() is default if no OSPRAY_NUM_THREADS - # https://github.com/ospray/ospcommon/blob/master/ospcommon/tasking/detail/tasking_system_init.cpp#L47 - # https://www.threadingbuildingblocks.org/docs/doxygen/a00150.html - # more ospray definitions: https://www.ospray.org/documentation.html#environment-variables - 'KNOB_MAX_WORKER_THREADS': '65535', # max. 
threads used by OpenSWR (limited by number of hardware threads) - # details in https://gitlab.version.fz-juelich.de/vis/vis-software/issues/14 - # more knob defs: https://github.com/mesa3d/mesa/blob/master/src/gallium/docs/source/drivers/openswr/knobs.rst -} - -modextrapaths = {'PYTHONPATH': 'lib64/python%(pyshortver)s/site-packages'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/ParaView/ParaView-5.9.1-EGL-gpsmkl-2021-Python-3.8.5.eb b/Golden_Repo/p/ParaView/ParaView-5.9.1-EGL-gpsmkl-2021-Python-3.8.5.eb deleted file mode 100644 index 056de696f78412a560eae3b85a9a67e1c8cfe990..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParaView/ParaView-5.9.1-EGL-gpsmkl-2021-Python-3.8.5.eb +++ /dev/null @@ -1,344 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ParaView' -version = '5.9.1' -versionsuffix = '-EGL-Python-%(pyver)s' - -homepage = "http://www.paraview.org" -description = """Paraview is a scientific parallel visualizer. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -local_dwnlsfx_src = 'download.php?submit=Download&version=v%(version_major_minor)s&type=source&os=Sources&downloadFile=' -source_urls = [('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_src)] -sources = [("ParaView-v%(version)s.tar.gz")] -checksums = [('sha256', 'efbcba00ba38c23d0ada1bde7144a8745caa308d9e1f94a4a71d8af63732266f')] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('git', '2.28.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Boost', '1.74.0', '', ('gpsmpi', '2020')), - ('X11', '20200222'), - ('bzip2', '1.0.8'), - ('HDF5', '1.10.6'), - ('FFmpeg', '4.3.1'), - ('Embree', '3.12.2'), - ('OSPRay', '2.6.0'), - ('libpng', '1.6.37'), - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('libxml2', '2.9.10'), - ('LibTIFF', '4.1.0'), - ('zlib', '1.2.11'), - ('netCDF', '4.7.4'), - ('netCDF-C++4', '4.3.1'), - ('netCDF-Fortran', '4.5.3'), - ('mpi4py', '3.0.3', '-Python-%(pyver)s'), - ('nlohmann-json', '3.9.1'), # for ParFlow plugin - # ('VTK', '8.2.0', '-Python-%(pyver)s', ('gcccoremkl', '8.3.0-2019.3.199')), - # ('VTKm','1.1.0','-AVX2'), - ('Qt5', '5.14.2'), - ('SciPy-Stack', '2021', '-Python-%(pyver)s', ('gcccoremkl', '10.3.0-2021.2.0')), - ('VirtualGL', '2.6.4'), - ('OpenGL', '2020'), -] - -separate_build_dir = True -# parallel = 24 - -# ensure we do not use a too advanced GL-version at config/build-time, which might not be available at run-time -preconfigopts = "export __EGL_VENDOR_LIBRARY_FILENAMES=${EBROOTOPENGL}/share/glvnd/egl_vendor.d/50_mesa.json && " -prebuildopts = "export __EGL_VENDOR_LIBRARY_FILENAMES=${EBROOTOPENGL}/share/glvnd/egl_vendor.d/50_mesa.json && " - -######################################################################################## -# check ParaView Superbuild options # -# https://gitlab.kitware.com/paraview/paraview-superbuild/tree/master # -# # -# check ParaView Build documenation # -# https://gitlab.kitware.com/paraview/paraview/blob/master/Documentation/dev/build.md # -######################################################################################## - -configopts = '-DCMAKE_POLICY_DEFAULT_CMP0074=OLD ' -configopts += '-DCMAKE_POLICY_DEFAULT_CMP0077=OLD ' - -# --- general settings --- # -configopts += '-DCMAKE_CXX_STANDARD=11 ' -configopts += '-DCXX_STANDARD_REQUIRED=ON ' -configopts += '-DCMAKE_CXX_EXTENSIONS=OFF ' # eg.-std=c++11 rather than -std=gnu++11 -configopts += '-DVTK_USE_CXX11_FEATURES=ON ' -# 
configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -configopts += '-DBUILD_TESTING=OFF ' # Without internet connection turn off testing -# Or consult https://gitlab.kitware.com/vtk/vtk/blob/master/Documentation/dev/git/data.md -# and download ExternalData to $EASYBUILD_SOURCEPATH and adjust -DExternalData_OBJECT_STORES accordingly -# configopts += '-DExternalData_OBJECT_STORES=%(builddir)s/ExternalData ' - -configopts += '-DBUILD_EXAMPLES=ON ' - -configopts += '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DZLIB_LIBRARY_DEBUG=$EBROOTZLIB/lib/libz.so ' - -# https://forum.openframeworks.cc/t/nvidia-drivers-pthreads-and-segfaults/2524 -configopts += '-DCMAKE_CXX_FLAGS="-lpthread $CMAKE_CXX_FLAGS" ' -configopts += '-DCMAKE_C_FLAGS="-lpthread $CMAKE_C_FLAGS" ' - -configopts += '-DPARAVIEW_BUILD_EDITION=CANONICAL ' -configopts += '-DPARAVIEW_BUILD_WITH_KITS=OFF ' -configopts += '-DPARAVIEW_USE_QT=OFF ' -configopts += '-DPARAVIEW_QT_VERSION=5 ' -configopts += '-DPARAVIEW_ENABLE_WEB=ON ' - -configopts += '-DPARAVIEW_SHARED_LIBS=ON ' -configopts += '-DPARAVIEW_USE_PYTHON=ON ' -configopts += '-DPARAVIEW_PYTHON_VERSION=3 ' -configopts += "-DPYTHON_EXECUTABLE=$EBROOTPYTHON/bin/python " - -configopts += '-DVTK_PYTHON_VERSION=3 ' -configopts += '-DVTK_NO_PYTHON_THREADS=OFF ' -configopts += '-DVTK_PYTHON_FULL_THREADSAFE=OFF ' # visibility depends on VTK_NO_PYTHON_THREADS=OFF -# If you pass VTK_PYTHON_FULL_THREADSAFE to true, then each and every call to python will be protected with GIL, -# ensuring that you can have eg. other python interpreter in your application and still use python wrapping in vtk. -# configopts += '-DUSE_EXTERNAL_VTK:BOOL=ON ' - -# --- parallel (on-node) --- # -# https://blog.kitware.com/simple-parallel-computing-with-vtksmptools-2/ -configopts += '-DVTK_SMP_IMPLEMENTATION_TYPE=OpenMP ' - -# --- parallel (distributed) --- # -configopts += '-DMPIEXEC_MAX_NUMPROCS=24 ' -configopts += '-DPARAVIEW_USE_MPI=ON ' - -# --- IO --- # -configopts += '-DXDMF_BUILD_MPI=ON ' -configopts += '-DPARAVIEW_ENABLE_XDMF3=ON ' - -# --- large data --- # -configopts += '-DVTK_USE_64BIT_IDS=ON ' -configopts += '-DVTK_USE_LARGE_DATA=ON ' - -# --- rendering --- # -configopts += '-DVTK_RENDERING_BACKEND:STRING=OpenGL2 ' - -# OpenGL (hardware) -# https://kitware.github.io/paraview-docs/latest/cxx/Offscreen.html -# If VTK_OPENGL_HAS_EGL or VTK_OPENGL_HAS_OSMESA is ON, the build supports headless rendering, -# otherwise VTK_USE_X must be ON and the build does not support headless, -# but can still support offscreen rendering. -# If VTK_USE_X is OFF, then either VTK_OPENGL_HAS_OSMESA or VTK_OPENGL_HAS_EGL must be ON. -# Then the build does not support onscreen rendering, but only headless rendering. -# If PARAVIEW_BUILD_QT_GUI is ON and VTK_USE_X is ON, while ParaView command line tools won't link against -# or use X calls, Qt will and hence an accessible X server is still needed to run the desktop client. -# If VTK_OPENGL_HAS_OSMESA is ON, and VTK_USE_X is ON, -# then all the OpenGL and OSMesa variables should point to the Mesa libraries. -# Likewise, if VTK_OPENGL_HAS_EGL is ON and VTK_USE_X is ON, then all the OpenGL and EGL variables -# should point to the system libraries providing both, typically the NVidia libraries. 
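
This variant is built for EGL headless rendering; the EGL block further down notes that pvserver then takes --egl-device-index and --disable-xdisplay-test. The launcher below is only a usage sketch under the assumption of two GPUs per node and a Slurm or Open MPI rank variable; nothing in this easyconfig installs such a wrapper.

#!/usr/bin/env python3
# Hypothetical per-rank launcher: spread pvserver ranks across the node's two EGL devices.
import os

rank = int(os.environ.get('SLURM_PROCID', os.environ.get('OMPI_COMM_WORLD_RANK', '0')))
device_index = rank % 2  # assumption: 2 GPUs per node -> even ranks on device 0, odd ranks on device 1

os.execvp('pvserver', [
    'pvserver',
    '--egl-device-index=%d' % device_index,
    '--disable-xdisplay-test',
])
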
- -configopts += '-DOpenGL_GL_PREFERENCE=GLVND ' -configopts += '-DVTK_REPORT_OPENGL_ERRORS_IN_RELEASE_BUILDS=OFF ' - -configopts += "-DOPENGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_GLX_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_EGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -# configopts += "-DOPENGL_xmesa_INCLUDE_DIR=IGNORE " - -configopts += "-DOPENGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 " -configopts += "-DOPENGL_gl_LIBRARY=${EBROOTOPENGL}/lib/libGL.so " -configopts += "-DOPENGL_glx_LIBRARY=${EBROOTOPENGL}/lib/libGLX.so.0 " -configopts += "-DOPENGL_glu_LIBRARY=${EBROOTOPENGL}/lib/libGLU.so " -configopts += "-DOPENGL_egl_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 " - -# OpenGL over X -# configopts += '-DVTK_USE_X=ON ' # OFF:headless rendering -# already considered by Qt (https://gitlab.kitware.com/lorensen/vtk/commit/b29f6db3f746d84f830c81e4212e48db192e4dbb) -# configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=OFF ' -# configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D - -# EGL (off-screen rendering with OpenGL, but without the need for X) -# call pvserver with –egl-device-index=0 or 1 and –disable-xdisplay-test -configopts += '-DVTK_OPENGL_HAS_EGL=ON ' -configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -configopts += '-DVTK_USE_X=OFF ' -configopts += '-DVTK_DEFAULT_EGL_DEVICE_INDEX=0 ' -# configopts += '-DEGL_INCLUDE_DIR=${EBROOTOPENGL}/include/EGL/ ' # https://www.khronos.org/registry/EGL/ -# configopts += '-DEGL_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 ' -# configopts += '-DEGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 ' -# configopts += '-DEGL_gldispatch_LIBRARY=${EBROOTOPENGL}/lib/libGLdispatch.so.0 ' # <path_to_libGLdispatch.so.0> - -# OSMesa (software) -# With OSMesa the DISPLAY variable has no meaning and is not needed -# When ON, implies that ParaView can use OSMesa to support headless modes of operation. 
-# configopts += '-DVTK_OPENGL_HAS_OSMESA=ON ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -# configopts += '-DVTK_USE_X=OFF ' -# configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=ON ' -# configopts += '-DOSMESA_INCLUDE_DIR=${EBROOTOPENGL}/include ' -# configopts += '-DOSMESA_LIBRARY=${EBROOTOPENGL}/lib/libOSMesa.so ' - -# Raytracing -configopts += '-DPARAVIEW_ENABLE_RAYTRACING=ON ' -configopts += '-DVTK_ENABLE_OSPRAY=ON ' -configopts += '-DVTK_ENABLE_VISRTX=OFF ' - -configopts += '-DPARAVIEW_USE_OSPRAY=ON ' -configopts += "-Dospray_DIR=${EBROOTOSPRAY} " -configopts += "-Dembree_DIR=${EBROOTEMBREE}/lib64/cmake/embree-3.12.2 " -configopts += '-DModule_vtkRenderingOSPRay=ON ' -configopts += '-DVTKOSPRAY_ENABLE_DENOISER=OFF ' - -# --- extra libs --- # -# configopts += "-DVTKm_DIR=$EBROOTVTKM/lib/cmake/vtkm-1.1/ " -configopts += '-DPARAVIEW_USE_VTKM=ON ' -configopts += '-DModule_vtkAcceleratorsVTKm=ON ' -configopts += '-DVTKm_Vectorization=AVX2 ' -configopts += '-DVTKm_ENABLE_OPENMP=ON ' -# configopts += '-DVTKm_ENABLE_MPI=ON ' -# configopts += '-DVTKm_ENABLE_LOGGING=ON ' -# configopts += '-DVTKm_ENABLE_CUDA=ON ' - -configopts += "-DFFMPEG_ROOT=$EBROOTFFMPEG " -configopts += '-DPARAVIEW_ENABLE_FFMPEG=ON ' -configopts += '-DModule_vtkIOFFMPEG=ON ' -configopts += '-DModule_vtkIOVideo=ON ' - -configopts += '-DModule_SignedTensor=ON ' - -configopts += '-DModule_vtkDICOM=ON ' - -# configopts += '-DModule_vtkFiltersMatlab=OFF ' -configopts += '-DModule_vtkFiltersReebGraph=ON ' -configopts += '-DModule_vtkFiltersSMP=ON ' -configopts += '-DModule_vtkFiltersSelection=ON ' -# configopts += '-DModule_vtkFiltersStatisticsGnu=OFF ' -configopts += '-DModule_vtkFiltersTopology=ON ' - -# --- coupling --- # -# configopts += '-DPARAVIEW_ENABLE_CATALYST=ON ' # variable is obsolete and no longer has any effect - -# --- development & testing --- # -configopts += '-DPARAVIEW_INSTALL_DEVELOPMENT_FILES=ON ' -configopts += '-DPARAVIEW_BUILD_DEVELOPER_DOCUMENTATION=OFF ' -configopts += '-DPARAVIEW_BUILD_EXAMPLES=OFF ' -configopts += '-DPARAVIEW_BUILD_TESTING=OFF ' -configopts += '-DPARAVIEW_BUILD_VTK_TESTING=OFF ' -configopts += '-DCTEST_TEST_TIMEOUT=10800 ' -configopts += '-DExternalData_TIMEOUT_INACTIVITY=0 ' - -# --- XDMF options --- # -configopts += '-DXDMF_USE_BZIP2=ON ' -configopts += '-DXDMF_USE_GZIP=ON ' - -# --- VTK external libraries --- # -configopts += '-DVTK_USE_SYSTEM_EXPAT=ON ' -configopts += '-DVTK_USE_SYSTEM_FREETYPE=ON ' -configopts += '-DVTK_USE_SYSTEM_HDF5=ON ' -configopts += '-DVTK_USE_SYSTEM_JPEG=ON ' -configopts += '-DVTK_USE_SYSTEM_LIBXML2=ON ' -configopts += '-DVTK_USE_SYSTEM_MPI4PY=ON ' -configopts += '-DVTK_USE_SYSTEM_NETCDF=ON ' -configopts += '-DVTK_USE_SYSTEM_PNG=ON ' -configopts += '-DVTK_USE_SYSTEM_PYGMENTS=ON ' -configopts += '-DVTK_USE_SYSTEM_SIX=ON ' -configopts += '-DVTK_USE_SYSTEM_TIFF=ON ' -configopts += '-DVTK_USE_SYSTEM_ZLIB=ON ' -configopts += '-DNETCDF_CXX_ROOT=$EBROOTNETCDFMINCPLUSPLUS ' -configopts += '-DNETCDF_F77_ROOT=$EBROOTNETCDFMINFORTRAN ' -configopts += '-DNETCDF_F90_ROOT=$EBROOTNETCDFMINFORTRAN ' - -# --- ParaView Extra-Reader --- # -configopts += '-DPARAVIEW_PLUGIN_ENABLE_ParFlow=ON ' - -configopts += '-DPARAVIEW_ENABLE_VISITBRIDGE=ON ' -configopts += '-DVISIT_BUILD_READER_Nek5000=ON ' -# configopts += '-DVISIT_BUILD_READER_Boxlib3D=ON ' # req. external dependency -# configopts += '-DVISIT_BUILD_READER_Mili=ON ' # req. external dependency -# configopts += '-DVISIT_BUILD_READER_Silo=ON ' # req. 
external dependency - -# --- ParaView Plugin Autoload --- # -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_AnalyzeNIfTIIO=ON ' -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_SurfaceLIC=ON ' # can result in error -# ( 489.245s) [paraview ]vtkOpenGLVertexArrayObj:293 ERR| vtkOpenGLVertexArrayObject (0x3cd7bc0) -# ( 489.245s) [paraview ]vtkOpenGLRenderUtilitie:78 WARN| Error setting 'vertexMC' in shader VAO. -# https://www.paraview.org/pipermail/paraview/2016-March/036518.html - -# --- infos -# -- Plugin: AcceleratedAlgorithms - Testing plugin : Enabled -# -- Plugin: AdiosReader - Reader for *.bp files based on Adios : Disabled -# -- Plugin: AdiosStagingReader - Performs staging reads from simulations using ADIOS : Disabled -# -- Plugin: AnalyzeNIfTIIO - Reader/Writer for Analyze and NifTI files : Enabled -# -- Plugin: ArrowGlyph - Glyph with customizable Arrows : Enabled -# -- Plugin: CDIReader - ICON netCDF/CDI Reader : Enabled -# -- Plugin: DigitalRockPhysics - Digital Rock Physics analysis filters : Enabled -# -- Plugin: EmbossingRepresentations - Embossing representations : Enabled -# -- Plugin: EyeDomeLighting - Add 3D View with eye-dome Lighting support : Enabled -# -- Plugin: GMVReader - Reader for binary or ASCII files stored in General Mesh Viewer file format : Enabled -# -- Plugin: GenericIOReader - GenericIO Reader for HACC data : Enabled -# -- Plugin: GeodesicMeasurement - Geodesic Measurement : Enabled -# -- Plugin: GmshReader - Reader for visualization of high-order polynomial solutions under the Gmsh format : off -# -- Plugin: InSituExodus - Experimental memory-conserving Exodus II file reader. : Disabled -# -- Plugin: LagrangianParticleTracker - Highly customizable filter for particle tracking : Enabled -# -- Plugin: MooseXfemClip - Clip partial elements generated by MOOSE XFEM : Enabled -# -- Plugin: Moments - Filters for Flux and Circulation Fields : Enabled -# -- Plugin: NetCDFTimeAnnotationPlugin - Provides NetCDF Time Annotation filter : Enabled -# netcdftime Python module not found! NetCDFTimeAnnotationPlugin not be available until it is installed. -# -- Plugin 'NetCDFTimeAnnotationPlugin' lists plugin library named 'NetCDFTimeAnnotationPlugin' -# -- Plugin: NonOrthogonalSource - Non Orthogonal Source code sample : Enabled -# -- Plugin: OpenVR - OpenVR Support : Disabled -# -- Plugin: PythonQtPlugin - PythonQt Plugin : Disabled -# -- Plugin: SLACTools - SLAC Tools : Enabled -# -- Plugin: SierraPlotTools - Sierra Plotting Tools : Enabled -# -- Plugin: StreamLinesRepresentation - Add animated Stream Lines representation for any type of dataset : Enabled -# -- Plugin: StreamingParticles - Render Particles with Streaming : Enabled -# -- Plugin: SurfaceLIC - Add Surface-LIC vector visualization support : Enabled -# -- Plugin: TemporalParallelismScriptGenerator - Plugin for creating Python spatio-temporal processing scripts : off -# -- Plugin: PacMan - Testing plugin : Enabled -# -- Plugin: ThickenLayeredCells - Filter to thicken layered cells : Enabled -# -- Plugin: VRPlugin - Virtual Reality Devices and Interactor styles : Disabled -# -- Plugin: VTKmFilters - VTKm many-core filters : Enabled -# -- Plugin: VaporPlugin - Plugin to read NCAR VDR files : Disabled -# -- Plugin: pvNVIDIAIndeX - Plugin for NVIDIA IndeX : Enabled - -# ######################################################### -# The ParaView server can be cranky, test downloads are quite often failing -# Using ; insted of && gives a second chance to download the test files, if the first serial attempt would fail. 
-# prebuildopts = 'make VTKData ;' # only if: configopts += '-DBUILD_TESTING=ON' - -postinstallcmds = ['python -m compileall %(installdir)s/lib64/python3.6/site-packages/'] -# 'cp -a %(builddir)s/ParaView-v%(version)s/ %(installdir)s/src', # copy source from build dir to install dir -# '', # move debug info to separate files: -# http://stackoverflow.com/questions/866721/how-to-generate-gcc-debug-symbol-outside-the-build-target -# '', # debugedit -i --base-dir=%(builddir)s/ParaView-v%(version)s --dest-dir= %(installdir)s/src <file.debug> -# # change path to source in debug info - -modextravars = {'CUDA_VISIBLE_DEVICES': '0,1'} - -# OpenSWR fully supports OpenGL 3.0 and most of 3.3, but ParaView requires 3.3 -> clame to fully support 3.3 -modextravars = {'MESA_GL_VERSION_OVERRIDE': '3.3'} -modextravars = {'MESA_GLSL_VERSION_OVERRIDE': '330'} - -modextravars = { - # OpenMP will choose an optimum number of threads by default, which is usually the number of cores - # 'OMP_NUM_THREADS': '28', # fix number of threads used by paraview filters and parallel sections in the code - # threads used by ospray - details https://github.com/ospray/ospray/blob/release-2.0.x/ospray/api/Device.cpp#L88 - # unset => OSPRAY uses all hardware threads - # 'OSPRAY_THREADS': '14', # OSPRay < 2.0 - # 'OSPRAY_NUM_THREADS': '14', # OSPRay >= 2.0 - # When TBB is used for OSPRAY: tbb::task_scheduler_init::default_num_threads() is default if no OSPRAY_NUM_THREADS - # https://github.com/ospray/ospcommon/blob/master/ospcommon/tasking/detail/tasking_system_init.cpp#L47 - # https://www.threadingbuildingblocks.org/docs/doxygen/a00150.html - # more ospray definitions: https://www.ospray.org/documentation.html#environment-variables - 'KNOB_MAX_WORKER_THREADS': '65535', # max. threads used by OpenSWR (limited by number of hardware threads) - # details in https://gitlab.version.fz-juelich.de/vis/vis-software/issues/14 - # more knob defs: https://github.com/mesa3d/mesa/blob/master/src/gallium/docs/source/drivers/openswr/knobs.rst -} - -modextrapaths = {'PYTHONPATH': 'lib64/python%(pyshortver)s/site-packages'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/ParaView/ParaView-5.9.1-gpsmkl-2021-Python-3.8.5.eb b/Golden_Repo/p/ParaView/ParaView-5.9.1-gpsmkl-2021-Python-3.8.5.eb deleted file mode 100644 index 7e7e4828cf67f23eef1121adc84944ae79c2afda..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParaView/ParaView-5.9.1-gpsmkl-2021-Python-3.8.5.eb +++ /dev/null @@ -1,341 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'ParaView' -version = '5.9.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = "http://www.paraview.org" -description = """Paraview is a scientific parallel visualizer. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -local_dwnlsfx_src = 'download.php?submit=Download&version=v%(version_major_minor)s&type=source&os=Sources&downloadFile=' -source_urls = [('http://www.paraview.org/paraview-downloads/%s' % local_dwnlsfx_src)] -sources = [("ParaView-v%(version)s.tar.gz")] -checksums = [('sha256', 'efbcba00ba38c23d0ada1bde7144a8745caa308d9e1f94a4a71d8af63732266f')] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('git', '2.28.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Boost', '1.74.0', '', ('gpsmpi', '2020')), - ('X11', '20200222'), - ('bzip2', '1.0.8'), - ('HDF5', '1.10.6'), - ('FFmpeg', '4.3.1'), - ('Embree', '3.12.2'), - ('OSPRay', '2.6.0'), - ('libpng', '1.6.37'), - ('expat', '2.2.9'), - ('freetype', '2.10.1'), - ('libjpeg-turbo', '2.0.5'), - ('libxml2', '2.9.10'), - ('LibTIFF', '4.1.0'), - ('zlib', '1.2.11'), - ('netCDF', '4.7.4'), - ('netCDF-C++4', '4.3.1'), - ('netCDF-Fortran', '4.5.3'), - ('mpi4py', '3.0.3', '-Python-%(pyver)s'), - ('nlohmann-json', '3.9.1'), # for ParFlow plugin - # ('VTK', '8.2.0', '-Python-%(pyver)s', ('gcccoremkl', '8.3.0-2019.3.199')), - # ('VTKm','1.1.0','-AVX2'), - ('Qt5', '5.14.2'), - ('SciPy-Stack', '2021', '-Python-%(pyver)s', ('gcccoremkl', '10.3.0-2021.2.0')), - ('VirtualGL', '2.6.4'), - ('OpenGL', '2020'), -] - -separate_build_dir = True -# parallel = 24 - -######################################################################################## -# check ParaView Superbuild options # -# https://gitlab.kitware.com/paraview/paraview-superbuild/tree/master # -# # -# check ParaView Build documenation # -# https://gitlab.kitware.com/paraview/paraview/blob/master/Documentation/dev/build.md # -######################################################################################## - -configopts = '-DCMAKE_POLICY_DEFAULT_CMP0074=OLD ' -configopts += '-DCMAKE_POLICY_DEFAULT_CMP0077=OLD ' - -# --- general settings --- # -configopts += '-DCMAKE_CXX_STANDARD=11 ' -configopts += '-DCXX_STANDARD_REQUIRED=ON ' -configopts += '-DCMAKE_CXX_EXTENSIONS=OFF ' # eg.-std=c++11 rather than -std=gnu++11 -configopts += '-DVTK_USE_CXX11_FEATURES=ON ' -# configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -configopts += '-DBUILD_TESTING=OFF ' # Without internet connection turn off testing -# Or consult https://gitlab.kitware.com/vtk/vtk/blob/master/Documentation/dev/git/data.md -# and download ExternalData to $EASYBUILD_SOURCEPATH and adjust -DExternalData_OBJECT_STORES accordingly -# configopts += '-DExternalData_OBJECT_STORES=%(builddir)s/ExternalData ' - -configopts += '-DBUILD_EXAMPLES=ON ' - -configopts += '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DZLIB_LIBRARY_DEBUG=$EBROOTZLIB/lib/libz.so ' - -# https://forum.openframeworks.cc/t/nvidia-drivers-pthreads-and-segfaults/2524 -configopts += '-DCMAKE_CXX_FLAGS="-lpthread $CMAKE_CXX_FLAGS" ' -configopts += '-DCMAKE_C_FLAGS="-lpthread $CMAKE_C_FLAGS" ' - -configopts += '-DPARAVIEW_BUILD_EDITION=CANONICAL ' -configopts += '-DPARAVIEW_BUILD_WITH_KITS=OFF ' -configopts += '-DPARAVIEW_USE_QT=ON ' -configopts += '-DPARAVIEW_QT_VERSION=5 ' -configopts += '-DPARAVIEW_ENABLE_WEB=ON ' - -configopts += '-DPARAVIEW_SHARED_LIBS=ON ' -configopts += '-DPARAVIEW_USE_PYTHON=ON ' -configopts += '-DPARAVIEW_PYTHON_VERSION=3 ' -configopts += "-DPYTHON_EXECUTABLE=$EBROOTPYTHON/bin/python " - -configopts += '-DVTK_PYTHON_VERSION=3 ' -configopts += '-DVTK_NO_PYTHON_THREADS=OFF ' -configopts += 
'-DVTK_PYTHON_FULL_THREADSAFE=OFF ' # visibility depends on VTK_NO_PYTHON_THREADS=OFF -# If you pass VTK_PYTHON_FULL_THREADSAFE to true, then each and every call to python will be protected with GIL, -# ensuring that you can have eg. other python interpreter in your application and still use python wrapping in vtk. -# configopts += '-DUSE_EXTERNAL_VTK:BOOL=ON ' - -# --- parallel (on-node) --- # -# https://blog.kitware.com/simple-parallel-computing-with-vtksmptools-2/ -configopts += '-DVTK_SMP_IMPLEMENTATION_TYPE=OpenMP ' - -# --- parallel (distributed) --- # -configopts += '-DMPIEXEC_MAX_NUMPROCS=24 ' -configopts += '-DPARAVIEW_USE_MPI=ON ' - -# --- IO --- # -configopts += '-DXDMF_BUILD_MPI=ON ' -configopts += '-DPARAVIEW_ENABLE_XDMF3=ON ' - -# --- large data --- # -configopts += '-DVTK_USE_64BIT_IDS=ON ' -configopts += '-DVTK_USE_LARGE_DATA=ON ' - -# --- rendering --- # -configopts += '-DVTK_RENDERING_BACKEND:STRING=OpenGL2 ' - -# OpenGL (hardware) -# https://kitware.github.io/paraview-docs/latest/cxx/Offscreen.html -# If VTK_OPENGL_HAS_EGL or VTK_OPENGL_HAS_OSMESA is ON, the build supports headless rendering, -# otherwise VTK_USE_X must be ON and the build does not support headless, -# but can still support offscreen rendering. -# If VTK_USE_X is OFF, then either VTK_OPENGL_HAS_OSMESA or VTK_OPENGL_HAS_EGL must be ON. -# Then the build does not support onscreen rendering, but only headless rendering. -# If PARAVIEW_BUILD_QT_GUI is ON and VTK_USE_X is ON, while ParaView command line tools won't link against -# or use X calls, Qt will and hence an accessible X server is still needed to run the desktop client. -# If VTK_OPENGL_HAS_OSMESA is ON, and VTK_USE_X is ON, -# then all the OpenGL and OSMesa variables should point to the Mesa libraries. -# Likewise, if VTK_OPENGL_HAS_EGL is ON and VTK_USE_X is ON, then all the OpenGL and EGL variables -# should point to the system libraries providing both, typically the NVidia libraries. 
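
Because this build selects VTK_SMP_IMPLEMENTATION_TYPE=OpenMP (see the parallel on-node block above), users can cap the threads used by VTK's SMP-based filters at run time, either via OMP_NUM_THREADS or programmatically. The snippet below is a usage sketch for pvpython, assuming the bundled VTK exposes the usual vtkSMPTools API; it is not executed by this easyconfig.

# Usage sketch for pvpython (assumption: vtkSMPTools is available from the bundled VTK as usual).
from vtkmodules.vtkCommonCore import vtkSMPTools

vtkSMPTools.Initialize(8)  # cap the OpenMP SMP backend at 8 threads; without this call all hardware threads are used
print("estimated SMP threads:", vtkSMPTools.GetEstimatedNumberOfThreads())
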
- -configopts += '-DOpenGL_GL_PREFERENCE=GLVND ' -configopts += '-DVTK_REPORT_OPENGL_ERRORS_IN_RELEASE_BUILDS=OFF ' - -configopts += "-DOPENGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_GLX_INCLUDE_DIR=${EBROOTOPENGL}/include " -configopts += "-DOPENGL_EGL_INCLUDE_DIR=${EBROOTOPENGL}/include " -# configopts += "-DOPENGL_xmesa_INCLUDE_DIR=IGNORE " - -configopts += "-DOPENGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 " -configopts += "-DOPENGL_gl_LIBRARY=${EBROOTOPENGL}/lib/libGL.so " -configopts += "-DOPENGL_glx_LIBRARY=${EBROOTOPENGL}/lib/libGLX.so.0 " -configopts += "-DOPENGL_glu_LIBRARY=${EBROOTOPENGL}/lib/libGLU.so " -configopts += "-DOPENGL_egl_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 " - -# OpenGL over X -configopts += '-DVTK_USE_X=ON ' # OFF:headless rendering -# already considered by Qt (https://gitlab.kitware.com/lorensen/vtk/commit/b29f6db3f746d84f830c81e4212e48db192e4dbb) -configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=OFF ' -configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D - -# EGL (off-screen rendering with OpenGL, but without the need for X) -# call pvserver with –egl-device-index=0 or 1 and –disable-xdisplay-test -# configopts += '-DVTK_OPENGL_HAS_EGL=ON ' -# configopts += '-DVTK_OPENGL_HAS_OSMESA=OFF ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -# configopts += '-DVTK_USE_X=OFF ' -# configopts += '-DVTK_DEFAULT_EGL_DEVICE_INDEX=0 ' -# #configopts += '-DEGL_INCLUDE_DIR=${EBROOTOPENGL}/include/EGL/ ' # https://www.khronos.org/registry/EGL/ -# #configopts += '-DEGL_LIBRARY=${EBROOTOPENGL}/lib/libEGL.so.1 ' -# #configopts += '-DEGL_opengl_LIBRARY=${EBROOTOPENGL}/lib/libOpenGL.so.0 ' -# #configopts += '-DEGL_gldispatch_LIBRARY=${EBROOTOPENGL}/lib/libGLdispatch.so.0 ' # <path_to_libGLdispatch.so.0> - -# OSMesa (software) -# With OSMesa the DISPLAY variable has no meaning and is not needed -# When ON, implies that ParaView can use OSMesa to support headless modes of operation. 
-# configopts += '-DVTK_OPENGL_HAS_OSMESA=ON ' # http://www.paraview.org/Wiki/ParaView_And_Mesa_3D -# configopts += '-DVTK_USE_X=OFF ' -# configopts += '-DVTK_DEFAULT_RENDER_WINDOW_OFFSCREEN=ON ' -# configopts += '-DOSMESA_INCLUDE_DIR=${EBROOTOPENGL}/include ' -# configopts += '-DOSMESA_LIBRARY=${EBROOTOPENGL}/lib/libOSMesa.so ' - -# Raytracing -configopts += '-DPARAVIEW_ENABLE_RAYTRACING=ON ' -configopts += '-DVTK_ENABLE_OSPRAY=ON ' -configopts += '-DVTK_ENABLE_VISRTX=OFF ' - -configopts += '-DPARAVIEW_USE_OSPRAY=ON ' -configopts += "-Dospray_DIR=${EBROOTOSPRAY} " -configopts += "-Dembree_DIR=${EBROOTEMBREE}/lib64/cmake/embree-3.12.2 " -configopts += '-DModule_vtkRenderingOSPRay=ON ' -configopts += '-DVTKOSPRAY_ENABLE_DENOISER=OFF ' - -# --- extra libs --- # -# configopts += "-DVTKm_DIR=$EBROOTVTKM/lib/cmake/vtkm-1.1/ " -configopts += '-DPARAVIEW_USE_VTKM=ON ' -configopts += '-DModule_vtkAcceleratorsVTKm=ON ' -configopts += '-DVTKm_Vectorization=AVX2 ' -configopts += '-DVTKm_ENABLE_OPENMP=ON ' -# configopts += '-DVTKm_ENABLE_MPI=ON ' -# configopts += '-DVTKm_ENABLE_LOGGING=ON ' -# configopts += '-DVTKm_ENABLE_CUDA=ON ' - -configopts += "-DFFMPEG_ROOT=$EBROOTFFMPEG " -configopts += '-DPARAVIEW_ENABLE_FFMPEG=ON ' -configopts += '-DModule_vtkIOFFMPEG=ON ' -configopts += '-DModule_vtkIOVideo=ON ' - -configopts += '-DModule_SignedTensor=ON ' - -configopts += '-DModule_vtkDICOM=ON ' - -# configopts += '-DModule_vtkFiltersMatlab=OFF ' -configopts += '-DModule_vtkFiltersReebGraph=ON ' -configopts += '-DModule_vtkFiltersSMP=ON ' -configopts += '-DModule_vtkFiltersSelection=ON ' -# configopts += '-DModule_vtkFiltersStatisticsGnu=OFF ' -configopts += '-DModule_vtkFiltersTopology=ON ' - -# --- coupling --- # -# configopts += '-DPARAVIEW_ENABLE_CATALYST=ON ' # variable is obsolete and no longer has any effect - -# --- development & testing --- # -configopts += '-DPARAVIEW_INSTALL_DEVELOPMENT_FILES=ON ' -configopts += '-DPARAVIEW_BUILD_DEVELOPER_DOCUMENTATION=OFF ' -configopts += '-DPARAVIEW_BUILD_EXAMPLES=OFF ' -configopts += '-DPARAVIEW_BUILD_TESTING=OFF ' -configopts += '-DPARAVIEW_BUILD_VTK_TESTING=OFF ' -configopts += '-DCTEST_TEST_TIMEOUT=10800 ' -configopts += '-DExternalData_TIMEOUT_INACTIVITY=0 ' - -# --- XDMF options --- # -configopts += '-DXDMF_USE_BZIP2=ON ' -configopts += '-DXDMF_USE_GZIP=ON ' - -# --- VTK external libraries --- # -configopts += '-DVTK_USE_SYSTEM_EXPAT=ON ' -configopts += '-DVTK_USE_SYSTEM_FREETYPE=ON ' -configopts += '-DVTK_USE_SYSTEM_HDF5=ON ' -configopts += '-DVTK_USE_SYSTEM_JPEG=ON ' -configopts += '-DVTK_USE_SYSTEM_LIBXML2=ON ' -configopts += '-DVTK_USE_SYSTEM_MPI4PY=ON ' -configopts += '-DVTK_USE_SYSTEM_NETCDF=ON ' -configopts += '-DVTK_USE_SYSTEM_PNG=ON ' -configopts += '-DVTK_USE_SYSTEM_PYGMENTS=ON ' -configopts += '-DVTK_USE_SYSTEM_SIX=ON ' -configopts += '-DVTK_USE_SYSTEM_TIFF=ON ' -configopts += '-DVTK_USE_SYSTEM_ZLIB=ON ' -configopts += '-DNETCDF_CXX_ROOT=$EBROOTNETCDFMINCPLUSPLUS ' -configopts += '-DNETCDF_F77_ROOT=$EBROOTNETCDFMINFORTRAN ' -configopts += '-DNETCDF_F90_ROOT=$EBROOTNETCDFMINFORTRAN ' - -# --- ParaView Extra-Reader --- # -configopts += '-DPARAVIEW_PLUGIN_ENABLE_ParFlow=ON ' - -# https://gitlab.kitware.com/paraview/visitbridge/-/blob/master/databases/CMakeLists.txt -configopts += '-DPARAVIEW_ENABLE_VISITBRIDGE=ON ' -configopts += '-DVISIT_BUILD_READER_Nek5000=ON ' -# configopts += '-DVISIT_BUILD_READER_Boxlib3D=ON ' # req. external dependency -# configopts += '-DVISIT_BUILD_READER_Mili=ON ' # req. 
external dependency -# configopts += '-DVISIT_BUILD_READER_Silo=ON ' # req. external dependency - -# --- ParaView Plugin Autoload --- # -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_AnalyzeNIfTIIO=ON ' -# configopts += '-DPARAVIEW_AUTOLOAD_PLUGIN_SurfaceLIC=ON ' # can result in error -# ( 489.245s) [paraview ]vtkOpenGLVertexArrayObj:293 ERR| vtkOpenGLVertexArrayObject (0x3cd7bc0) -# ( 489.245s) [paraview ]vtkOpenGLRenderUtilitie:78 WARN| Error setting 'vertexMC' in shader VAO. -# https://www.paraview.org/pipermail/paraview/2016-March/036518.html - -# --- infos -# -- Plugin: AcceleratedAlgorithms - Testing plugin : Enabled -# -- Plugin: AdiosReader - Reader for *.bp files based on Adios : Disabled -# -- Plugin: AdiosStagingReader - Performs staging reads from simulations using ADIOS : Disabled -# -- Plugin: AnalyzeNIfTIIO - Reader/Writer for Analyze and NifTI files : Enabled -# -- Plugin: ArrowGlyph - Glyph with customizable Arrows : Enabled -# -- Plugin: CDIReader - ICON netCDF/CDI Reader : Enabled -# -- Plugin: DigitalRockPhysics - Digital Rock Physics analysis filters : Enabled -# -- Plugin: EmbossingRepresentations - Embossing representations : Enabled -# -- Plugin: EyeDomeLighting - Add 3D View with eye-dome Lighting support : Enabled -# -- Plugin: GMVReader - Reader for binary or ASCII files stored in General Mesh Viewer file format : Enabled -# -- Plugin: GenericIOReader - GenericIO Reader for HACC data : Enabled -# -- Plugin: GeodesicMeasurement - Geodesic Measurement : Enabled -# -- Plugin: GmshReader - Reader for visualization of high-order polynomial solutions under the Gmsh format : off -# -- Plugin: InSituExodus - Experimental memory-conserving Exodus II file reader. : Disabled -# -- Plugin: LagrangianParticleTracker - Highly customizable filter for particle tracking : Enabled -# -- Plugin: MooseXfemClip - Clip partial elements generated by MOOSE XFEM : Enabled -# -- Plugin: Moments - Filters for Flux and Circulation Fields : Enabled -# -- Plugin: NetCDFTimeAnnotationPlugin - Provides NetCDF Time Annotation filter : Enabled -# netcdftime Python module not found! NetCDFTimeAnnotationPlugin not be available until it is installed. 
-# -- Plugin 'NetCDFTimeAnnotationPlugin' lists plugin library named 'NetCDFTimeAnnotationPlugin' -# -- Plugin: NonOrthogonalSource - Non Orthogonal Source code sample : Enabled -# -- Plugin: OpenVR - OpenVR Support : Disabled -# -- Plugin: PythonQtPlugin - PythonQt Plugin : Disabled -# -- Plugin: SLACTools - SLAC Tools : Enabled -# -- Plugin: SierraPlotTools - Sierra Plotting Tools : Enabled -# -- Plugin: StreamLinesRepresentation - Add animated Stream Lines representation for any type of dataset : Enabled -# -- Plugin: StreamingParticles - Render Particles with Streaming : Enabled -# -- Plugin: SurfaceLIC - Add Surface-LIC vector visualization support : Enabled -# -- Plugin: TemporalParallelismScriptGenerator - Plugin for creating Python spatio-temporal processing scripts : off -# -- Plugin: PacMan - Testing plugin : Enabled -# -- Plugin: ThickenLayeredCells - Filter to thicken layered cells : Enabled -# -- Plugin: VRPlugin - Virtual Reality Devices and Interactor styles : Disabled -# -- Plugin: VTKmFilters - VTKm many-core filters : Enabled -# -- Plugin: VaporPlugin - Plugin to read NCAR VDR files : Disabled -# -- Plugin: pvNVIDIAIndeX - Plugin for NVIDIA IndeX : Enabled - -# ######################################################### -# The ParaView server can be cranky, test downloads are quite often failing -# Using ; insted of && gives a second chance to download the test files, if the first serial attempt would fail. -# prebuildopts = 'make VTKData ;' # only if: configopts += '-DBUILD_TESTING=ON' - -postinstallcmds = ['python -m compileall %(installdir)s/lib64/python3.6/site-packages/'] -# 'cp -a %(builddir)s/ParaView-v%(version)s/ %(installdir)s/src', # copy source from build dir to install dir -# '', # move debug info to separate files: -# http://stackoverflow.com/questions/866721/how-to-generate-gcc-debug-symbol-outside-the-build-target -# '', # debugedit -i --base-dir=%(builddir)s/ParaView-v%(version)s --dest-dir= %(installdir)s/src <file.debug> -# # change path to source in debug info - -modextravars = {'CUDA_VISIBLE_DEVICES': '0,1'} - -# OpenSWR fully supports OpenGL 3.0 and most of 3.3, but ParaView requires 3.3 -> clame to fully support 3.3 -modextravars = {'MESA_GL_VERSION_OVERRIDE': '3.3'} -modextravars = {'MESA_GLSL_VERSION_OVERRIDE': '330'} - -modextravars = { - # OpenMP will choose an optimum number of threads by default, which is usually the number of cores - # 'OMP_NUM_THREADS': '28', # fix number of threads used by paraview filters and parallel sections in the code - # threads used by ospray - details https://github.com/ospray/ospray/blob/release-2.0.x/ospray/api/Device.cpp#L88 - # unset => OSPRAY uses all hardware threads - # 'OSPRAY_THREADS': '14', # OSPRay < 2.0 - # 'OSPRAY_NUM_THREADS': '14', # OSPRay >= 2.0 - # When TBB is used for OSPRAY: tbb::task_scheduler_init::default_num_threads() is default if no OSPRAY_NUM_THREADS - # https://github.com/ospray/ospcommon/blob/master/ospcommon/tasking/detail/tasking_system_init.cpp#L47 - # https://www.threadingbuildingblocks.org/docs/doxygen/a00150.html - # more ospray definitions: https://www.ospray.org/documentation.html#environment-variables - 'KNOB_MAX_WORKER_THREADS': '65535', # max. 
threads used by OpenSWR (limited by number of hardware threads) - # details in https://gitlab.version.fz-juelich.de/vis/vis-software/issues/14 - # more knob defs: https://github.com/mesa3d/mesa/blob/master/src/gallium/docs/source/drivers/openswr/knobs.rst -} - -modextrapaths = {'PYTHONPATH': 'lib64/python%(pyshortver)s/site-packages'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/ParaView/python-5.8.1-simpleCatalystSupport_0.4.patch b/Golden_Repo/p/ParaView/python-5.8.1-simpleCatalystSupport_0.4.patch deleted file mode 100644 index f6597d0076909e58396a52f60175b5605c6d291d..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/ParaView/python-5.8.1-simpleCatalystSupport_0.4.patch +++ /dev/null @@ -1,280 +0,0 @@ -diff -Naur ParaView-v5.7.0/Wrapping/Python/paraview/simple.py ParaView-v5.7.0-changed/Wrapping/Python/paraview/simple.py ---- ParaView-v5.7.0.orig/Wrapping/Python/paraview/simple.py 2019-09-26 23:10:51.000000000 +0200 -+++ ParaView-v5.7.0/Wrapping/Python/paraview/simple.py 2020-03-04 18:01:36.043382025 +0100 -@@ -47,6 +47,9 @@ - import sys - import warnings - -+from threading import Thread -+from time import sleep, time -+ - if sys.version_info >= (3,): - xrange = range - -@@ -2443,6 +2446,266 @@ - view = property(get_view, set_view) - source = property(get_source, set_source) - -+ -+class CatalystConnection: -+ -+ _EventChecker = None -+ _version = "0.4.0" -+ -+ def vtkObserver(self, EventObject, EventType): -+ #print(EventType) -+ if EventType is None: -+ return None -+ if EventType == "UpdateEvent": # called for new insitu data -+ for i in range(len(self.updatefunctions)): -+ self.updatefunctions[i]() -+ return True -+ if EventType == "ConnectionCreatedEvent": # called when Connection is established -+ self._connected = True -+ return True -+ if EventType == "ConnectionClosedEvent": # called when Connection is closed -+ self._connected = False -+ self._isPaused = False -+ return True -+ if EventType == "UpdatePropertyEvent": -+ return True -+ if EventType == "PropertyModifiedEvent": -+ return True -+ if EventType == "ModifiedEvent": -+ return True -+ servermanager.vtk.vtkLogger.Log(servermanager.vtk.vtkLogger.VERBOSITY_WARNING, __file__, 31, "Unknown Event Type in catalyst connection: " + EventType) -+ return False -+ -+ -+ def __init__(self,): -+ self.updatefunctions = [] -+ self._connected = False -+ self._session = None -+ self._delay = 0.01 -+ -+ servermanager.vtkProcessModule.GetProcessModule().MultipleSessionsSupportOn() -+ -+ if not CatalystConnection._EventChecker: -+ _EventChecker = CatalystConnection.__EventChecker(self._delay) -+ _EventChecker.StartRegularChecks() -+ -+ -+ def Start(self, port = 22222): -+ """ -+ Open Catalyst port, so the simulation can connect -+ """ -+ if self.IsConnected(): -+ return False; -+ -+ activeConnection = servermanager.ActiveConnection -+ -+ self._InProxy = servermanager.CreateProxy("coprocessing","LiveInsituLink") -+ adaptor = self._InProxy.SafeDownCast(self._InProxy) -+ -+ adaptor.AddObserver(servermanager.vtk.vtkCommand.AnyEvent, self.vtkObserver) -+ -+ servermanager.vtkSMPropertyHelper(adaptor, "InsituPort").Set(port) -+ servermanager.vtkSMPropertyHelper(adaptor, "ProcessType").Set("Visualization") -+ -+ adaptor.UpdateVTKObjects() -+ -+ self._session = adaptor.GetSession() -+ self._connection = servermanager.GetConnectionFromId(self._session.ConnectToCatalyst()) -+ self._isPaused = False -+ -+ self._InProxy.SetInsituProxyManager(self._connection.Session.GetSessionProxyManager()) -+ -+ adaptor.InvokeCommand("Initialize") -+ 
-+ servermanager.SetActiveConnection(activeConnection) -+ -+ -+ -+ def IsConnected(self): -+ """ -+ True if the simulation is connected, false if the simulation is not yet connected, or disconnected -+ """ -+ return self._connected -+ -+ def IsPaused(self): -+ """ -+ True if the simulation is Paused, false if the simulation is running, or there is not connection to a simulation -+ """ -+ return self._isPaused -+ -+ def BlockTillConnected(self, timeout = 0): -+ """ -+ block till connection is established by simulation, or the timeout in seconds is eclipsed, default is 0, therefore without timeout -+ """ -+ start = time() -+ while (time() - start < timeout) or timeout == 0: -+ sleep(self._delay) -+ if self.IsConnected(): -+ return True -+ return False; -+ -+ def BlockTillNextUpdate(self, timeout = 0): -+ """ -+ block till next update happend by simulation, or the timeout in seconds is eclipsed, default is 0, therefore without timeout -+ """ -+ start = time() -+ oldUpdates = self.GetTimeStep() -+ while (time() - start < timeout) or timeout == 0: -+ sleep(self._delay) -+ if oldUpdates < self.GetTimeStep(): -+ return True -+ return False; -+ -+ def BlockTillTimeStepAndPause(self, TimeStep, timeout = 0): -+ """ -+ block till time step is reached and then pause the simulation, or the timeout in seconds is eclipsed, default is 0, therefore without timeout. -+ """ -+ start = time() -+ while (time() - start < timeout) or timeout == 0: -+ sleep(self._delay) -+ if TimeStep -1 <= self.GetTimeStep(): # needs to step to pause the simulation, therefore activate pause one step early -+ self.SetPauseSimulation(True) -+ return True -+ return False; -+ -+ -+ def OutputAvaiable(self, ChannelInformation = ['input', 0]): -+ """ -+ Check if this combination of ChannelName and port is avaible in simulation data -+ """ -+ if self.IsConnected() == False: -+ return False -+ return self.GetCatalystSources().count(ChannelInformation) == 1 -+ -+ -+ def GetCatalystSources(self): -+ """ -+ List all avaiable combinations of data output the simulation offers -+ """ -+ if self.IsConnected() == False: -+ return None -+ collection = servermanager.vtk.vtkCollection() -+ self._InProxy.GetInsituProxyManager().GetProxies("sources", collection) -+ Channels = [] -+ for i in range(collection.GetNumberOfItems()): -+ proxy = collection.GetItemAsObject(i) -+ if proxy.GetVTKClassName() == "vtkPVTrivialProducer": -+ for j in range(proxy.GetNumberOfOutputPorts()): -+ Channels.append([proxy.GetLogName(), j]) -+ return Channels -+ -+ -+ def Extract(self, extractName, ChannelInformation = ['input', 0]): -+ """ -+ Extract sources from the catalyst connection into the normal pipeline, so it can be used. -+ Returns the Source Object, named extractName. On error returns None. 
-+ """ -+ if self.IsConnected() == False: -+ servermanager.vtk.vtkLogger.Log(servermanager.vtk.vtkLogger.VERBOSITY_WARN, __file__, 144, "Catalyst simulation is not yet connected") -+ return None -+ if self.OutputAvaiable(ChannelInformation) == False: -+ servermanager.vtk.vtkLogger.Log(servermanager.vtk.vtkLogger.VERBOSITY_WARN, __file__, 147, "No output for this channelInformation avaiable: " + str(ChannelInformation)) -+ return None -+ if FindSource(extractName) is not None: -+ servermanager.vtk.vtkLogger.Log(servermanager.vtk.vtkLogger.VERBOSITY_WARN, __file__, 150, "There is already a source named " + extractName) -+ return None -+ self.EProxy = self._InProxy.CreateExtract("sources", ChannelInformation[0], ChannelInformation[1]) -+ servermanager.ActiveConnection.Session.GetSessionProxyManager().RegisterProxy("sources", extractName, self.EProxy) -+ -+ return FindSource(extractName) -+ -+ def GetTimeStep(self): -+ """ -+ Returns the timeStep of the visible data -+ """ -+ return self._InProxy.GetTimeStep() -+ -+ def SetPauseSimulation(self, pause): -+ """ -+ Allows to pause and unpause the simulation -+ """ -+ if self.IsConnected() == False: -+ return -+ if self._isPaused == bool(pause): -+ return -+ self._isPaused = bool(pause) -+ servermanager.vtkSMPropertyHelper(self._InProxy, "SimulationPaused").Set(self._isPaused) -+ self._InProxy.UpdateVTKObjects() -+ if not self._isPaused: -+ self._InProxy.InvokeCommand("LiveChanged") -+ -+ def SetLoopDelay(self, delay): -+ self._delay = delay -+ _EventChecker.TimerDelay = delay -+ -+ def AddUpdateFunction(self, func): -+ """ -+ Add a function that will be called, when the catalyst data is updated. -+ This function will be called without any arguments. -+ """ -+ self.updatefunctions.append(func) -+ -+ def LengthUpdateFunctions(self): -+ """ -+ Number of functions that will be called, when the catalyst data is updated. -+ """ -+ return len(self.updatefunctions) -+ -+ def RemoveUpdateFunction(self, func): -+ """ -+ Remove a function that would be called, when the catalyst data is updated. -+ """ -+ return self.updatefunctions.remove(func) -+ -+ def ClearUpdateFunctions(self): -+ """ -+ Remove all functions that would be called, when the catalyst data is updated. 
-+ """ -+ self.updatefunctions = [] -+ -+ -+ -+ -+ class __EventChecker: -+ def __init__(self, delay = 0.01): -+ self.TimerDelay = delay # Delay between calls to check nam for new events -+ self._Running = False -+ self._session = servermanager.ProxyManager().GetSession() -+ self._nam = servermanager.vtkProcessModule.GetProcessModule().GetNetworkAccessManager() -+ -+ def CheckForUpdates(self): -+ """ -+ Calls processEvents(), till all events are processed -+ """ -+ if self._session is None: -+ return -+ if isinstance(self._session, servermanager.vtkSMSessionClient): -+ if not self._session.IsNotBusy(): -+ return -+ while self._nam.ProcessEvents(1): -+ temp = self._nam.ProcessEvents(1) -+ -+ def _RegularChecks(self): -+ """ -+ starts an infinte loop, regularily calling CheckForUpdates(), needs a valid value set for self.timerDelay -+ """ -+ if self._Running: -+ return False -+ self._Running = True -+ self._doRegularChecks = True -+ while self._doRegularChecks: -+ sleep(self.TimerDelay) -+ self.CheckForUpdates() -+ self._Running = False -+ -+ def StartRegularChecks(self): -+ """ -+ Starts a seperate thread to do regular checks in the background -+ """ -+ thread = Thread(target=self._RegularChecks, args=()) -+ thread.daemon=True -+ thread.start() -+ - # ----------------------------------------------------------------------------- - - class _funcs_internals: diff --git a/Golden_Repo/p/Perl/Perl-5.32.0-GCCcore-10.3.0.eb b/Golden_Repo/p/Perl/Perl-5.32.0-GCCcore-10.3.0.eb deleted file mode 100644 index 8bbaae86810be1b1262797e3a4309fc294cd1547..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Perl/Perl-5.32.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,1508 +0,0 @@ -name = 'Perl' -version = '5.32.0' - -homepage = 'https://www.perl.org/' -description = """Larry Wall's Practical Extraction and Report Language""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://www.cpan.org/src/%(version_major)s.0'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['efeb1ce1f10824190ad1cadbcccf6fdb8a5d37007d0100d2d9ae5f2b5900c0b4'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), # for Net::SSLeay - ('expat', '2.2.9'), # for XML::Parser - ('libxml2', '2.9.10'), # for XML::LibXML -] - -# OpenSSL is required for Net::SSLeay -osdependencies = ['openssl', ('openssl-devel', 'libssl-dev', - 'libopenssl-devel')] - -# !! order of extensions is important !! 
-# extensions updated on July 24th 2020 -exts_list = [ - ('Config::General', '2.63', { - 'source_tmpl': 'Config-General-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TL/TLINDEN'], - 'checksums': ['0a9bf977b8aabe76343e88095d2296c8a422410fd2a05a1901f2b20e2e1f6fad'], - }), - ('File::Listing', '6.04', { - 'source_tmpl': 'File-Listing-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['1e0050fcd6789a2179ec0db282bf1e90fb92be35d1171588bd9c47d52d959cf5'], - }), - ('ExtUtils::InstallPaths', '0.012', { - 'source_tmpl': 'ExtUtils-InstallPaths-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['84735e3037bab1fdffa3c2508567ad412a785c91599db3c12593a50a1dd434ed'], - }), - ('ExtUtils::Helpers', '0.026', { - 'source_tmpl': 'ExtUtils-Helpers-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['de901b6790a4557cf4ec908149e035783b125bf115eb9640feb1bc1c24c33416'], - }), - ('Test::Harness', '3.43_02', { - 'source_tmpl': 'Test-Harness-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['019be59351a277c483f2bfb872294016c5831b67193825d5519dc773e11cc63e'], - }), - ('ExtUtils::Config', '0.008', { - 'source_tmpl': 'ExtUtils-Config-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['ae5104f634650dce8a79b7ed13fb59d67a39c213a6776cfdaa3ee749e62f1a8c'], - }), - ('Module::Build::Tiny', '0.039', { - 'source_tmpl': 'Module-Build-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['7d580ff6ace0cbe555bf36b86dc8ea232581530cbeaaea09bccb57b55797f11c'], - }), - ('aliased', '0.34', { - 'source_tmpl': 'aliased-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['c350524507cd827fab864e5d4c2cc350b1babaa12fa95aec0ca00843fcc7deeb'], - }), - ('Text::Glob', '0.11', { - 'source_tmpl': 'Text-Glob-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RC/RCLAMP'], - 'checksums': ['069ccd49d3f0a2dedb115f4bdc9fbac07a83592840953d1fcdfc39eb9d305287'], - }), - ('Regexp::Common', '2017060201', { - 'source_tmpl': 'Regexp-Common-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AB/ABIGAIL'], - 'checksums': ['ee07853aee06f310e040b6bf1a0199a18d81896d3219b9b35c9630d0eb69089b'], - }), - ('GO::Utils', '0.15', { - 'source_tmpl': 'go-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CM/CMUNGALL'], - 'checksums': ['423d26155ee85ca51ab2270cee59f4e85b193e57ac3a29aff827298c0a396b12'], - }), - ('Module::Pluggable', '5.2', { - 'source_tmpl': 'Module-Pluggable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SI/SIMONW'], - 'checksums': ['b3f2ad45e4fd10b3fb90d912d78d8b795ab295480db56dc64e86b9fa75c5a6df'], - }), - ('Test::Fatal', '0.014', { - 'source_tmpl': 'Test-Fatal-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['bcdcef5c7b2790a187ebca810b0a08221a63256062cfab3c3b98685d91d1cbb0'], - }), - ('Test::Warnings', '0.030', { - 'source_tmpl': 'Test-Warnings-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['89a4947ddf1564ae01122275584433d7f6c4370370bcf3768922d796956ae24f'], - }), - ('File::ShareDir', '1.116', { - 
'source_tmpl': 'File-ShareDir-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['59d90bfdf98c4656ff4173e62954ea8cf0de66565e35d108ecd7050596cb8328'], - }), - ('File::ShareDir::Install', '0.13', { - 'source_tmpl': 'File-ShareDir-Install-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['45befdf0d95cbefe7c25a1daf293d85f780d6d2576146546e6828aad26e580f9'], - }), - ('DateTime::Locale', '1.26', { - 'source_tmpl': 'DateTime-Locale-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['1e43152a7ca4585a335958da0dd5e430b95a820f6de6456df26e7ec2a6649792'], - }), - ('DateTime::TimeZone', '2.39', { - 'source_tmpl': 'DateTime-TimeZone-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['65a49083bf465b42c6a65df575efaceb87b5ba5a997d4e91e6ddba57190c8fca'], - }), - ('Test::Requires', '0.11', { - 'source_tmpl': 'Test-Requires-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TOKUHIROM'], - 'checksums': ['4b88de549597eecddf7c3c38a4d0204a16f59ad804577b671896ac04e24e040f'], - }), - ('Module::Implementation', '0.09', { - 'source_tmpl': 'Module-Implementation-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['c15f1a12f0c2130c9efff3c2e1afe5887b08ccd033bd132186d1e7d5087fd66d'], - }), - ('Module::Build', '0.4231', { - 'source_tmpl': 'Module-Build-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['7e0f4c692c1740c1ac84ea14d7ea3d8bc798b2fb26c09877229e04f430b2b717'], - }), - ('Module::Runtime', '0.016', { - 'source_tmpl': 'Module-Runtime-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/Z/ZE/ZEFRAM'], - 'checksums': ['68302ec646833547d410be28e09676db75006f4aa58a11f3bdb44ffe99f0f024'], - }), - ('Try::Tiny', '0.30', { - 'source_tmpl': 'Try-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['da5bd0d5c903519bbf10bb9ba0cb7bcac0563882bcfe4503aee3fb143eddef6b'], - }), - ('Params::Validate', '1.29', { - 'source_tmpl': 'Params-Validate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['49a68dfb430bea028042479111d19068e08095e5a467e320b7ab7bde3d729733'], - }), - ('List::MoreUtils', '0.428', { - 'source_tmpl': 'List-MoreUtils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['713e0945d5f16e62d81d5f3da2b6a7b14a4ce439f6d3a7de74df1fd166476cc2'], - }), - ('Exporter::Tiny', '1.002002', { - 'source_tmpl': 'Exporter-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TOBYINK'], - 'checksums': ['00f0b95716b18157132c6c118ded8ba31392563d19e490433e9a65382e707101'], - }), - ('Class::Singleton', '1.5', { - 'source_tmpl': 'Class-Singleton-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHAY'], - 'checksums': ['38220d04f02e3a803193c2575a1644cce0b95ad4b95c19eb932b94e2647ef678'], - }), - ('DateTime', '1.52', { - 'source_tmpl': 'DateTime-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['67828b6172b23d9534b8f2a593b05caa6240737a3b2246f3e1fe4e015dbc3dfc'], - }), - ('File::Find::Rule::Perl', '1.15', { - 'source_tmpl': 
'File-Find-Rule-Perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['9a48433f86e08ce18e03526e2982de52162eb909d19735460f07eefcaf463ea6'], - }), - ('Readonly', '2.05', { - 'source_tmpl': 'Readonly-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SA/SANKO'], - 'checksums': ['4b23542491af010d44a5c7c861244738acc74ababae6b8838d354dfb19462b5e'], - }), - ('Git', '0.42', { - 'source_tmpl': 'Git-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MS/MSOUTH'], - 'checksums': ['9469a9f398f3a2bf2b0500566ee41d3ff6fae460412a137185767a1cc4783a6d'], - }), - ('Tree::DAG_Node', '1.31', { - 'source_tmpl': 'Tree-DAG_Node-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['1c8ba69772568b3758054247097512c550efe31517c329fb65eef7afccc9d304'], - }), - ('Template', '3.009', { - 'source_tmpl': 'Template-Toolkit-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AT/ATOOMIC'], - 'checksums': ['d6ad23bbf637a59b5dfd1ac006460dfcb185982e4852cde77150fbd085f1f5b6'], - }), - ('DBI', '1.643', { - 'source_tmpl': 'DBI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TI/TIMB'], - 'checksums': ['8a2b993db560a2c373c174ee976a51027dd780ec766ae17620c20393d2e836fa'], - }), - ('DBD::SQLite', '1.65_02', { - 'source_tmpl': 'DBD-SQLite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI'], - 'checksums': ['f4139bebc246943f7b5504b03996b8d367e788410c55f7b03238889244d56da0'], - }), - ('Math::Bezier', '0.01', { - 'source_tmpl': 'Math-Bezier-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AB/ABW'], - 'checksums': ['11a815fc45fdf0efabb1822ab77faad8b9eea162572c5f0940c8ed7d56e6b8b8'], - }), - ('Archive::Extract', '0.86', { - 'source_tmpl': 'Archive-Extract-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['9acd09cdb8e8cf0b6d08210a3b80342300c89a359855319bf6b00c14c4aab687'], - }), - ('DBIx::Simple', '1.37', { - 'source_tmpl': 'DBIx-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JU/JUERD'], - 'checksums': ['46d311aa2ce08907401c56119658426dbb044c5a40de73d9a7b79bf50390cae3'], - }), - ('Shell', '0.73', { - 'source_tmpl': 'Shell-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/F/FE/FERREIRA'], - 'checksums': ['f7dbebf65261ed0e5abd0f57052b64d665a1a830bab4c8bbc220f235bd39caf5'], - }), - ('File::Spec', '3.75', { - 'source_tmpl': 'PathTools-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/X/XS/XSAWYERX'], - 'checksums': ['a558503aa6b1f8c727c0073339081a77888606aa701ada1ad62dd9d8c3f945a2'], - }), - ('Test::Simple', '1.302175', { - 'source_tmpl': 'Test-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['c8c8f5c51ad6d7a858c3b61b8b658d8e789d3da5d300065df0633875b0075e49'], - }), - ('Set::Scalar', '1.29', { - 'source_tmpl': 'Set-Scalar-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAVIDO'], - 'checksums': ['a3dc1526f3dde72d3c64ea00007b86ce608cdcd93567cf6e6e42dc10fdc4511d'], - }), - ('IO::Stringy', '2.111', { - 'source_tmpl': 'IO-stringy-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DS/DSKOLL'], - 'checksums': ['8c67fd6608c3c4e74f7324f1404a856c331dbf48d9deda6aaa8296ea41bf199d'], - }), - 
('Encode::Locale', '1.05', { - 'source_tmpl': 'Encode-Locale-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['176fa02771f542a4efb1dbc2a4c928e8f4391bf4078473bd6040d8f11adb0ec1'], - }), - ('XML::SAX::Base', '1.09', { - 'source_tmpl': 'XML-SAX-Base-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GRANTM'], - 'checksums': ['66cb355ba4ef47c10ca738bd35999723644386ac853abbeb5132841f5e8a2ad0'], - }), - ('XML::NamespaceSupport', '1.12_9', { - 'source_tmpl': 'XML-NamespaceSupport-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PERIGRIN'], - 'checksums': ['2e84a057f0a8c845a612d212742cb94fca4fc8a433150b5721bd448f77d1e4a9'], - }), - ('XML::SAX', '1.02', { - 'source_tmpl': 'XML-SAX-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GRANTM'], - 'checksums': ['4506c387043aa6a77b455f00f57409f3720aa7e553495ab2535263b4ed1ea12a'], - }), - ('Test::LeakTrace', '0.16', { - 'source_tmpl': 'Test-LeakTrace-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEEJO'], - 'checksums': ['5f089eed915f1ec8c743f6d2777c3ecd0ca01df2f7b9e10038d316952583e403'], - }), - ('Test::Exception', '0.43', { - 'source_tmpl': 'Test-Exception-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['156b13f07764f766d8b45a43728f2439af81a3512625438deab783b7883eb533'], - }), - ('Text::Aligner', '0.16', { - 'source_tmpl': 'Text-Aligner-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['5c857dbce586f57fa3d7c4ebd320023ab3b2963b2049428ae01bd3bc4f215725'], - }), - ('Text::Table', '1.134', { - 'source_tmpl': 'Text-Table-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['bacf429b18b7c0b22c088219055063e3902749531d488ebd7b17eab7757cd10b'], - }), - ('MIME::Types', '2.17', { - 'source_tmpl': 'MIME-Types-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MARKOV'], - 'checksums': ['e04ed7d42f1ff3150a303805f2689c28f80b92c511784d4641cb7f040d3e8ff6'], - }), - ('File::Copy::Recursive', '0.45', { - 'source_tmpl': 'File-Copy-Recursive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DM/DMUEY'], - 'checksums': ['d3971cf78a8345e38042b208bb7b39cb695080386af629f4a04ffd6549df1157'], - }), - ('Cwd::Guard', '0.05', { - 'source_tmpl': 'Cwd-Guard-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KAZEBURO'], - 'checksums': ['7afc7ca2b9502e440241938ad97a3e7ebd550180ebd6142e1db394186b268e77'], - }), - ('Capture::Tiny', '0.48', { - 'source_tmpl': 'Capture-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['6c23113e87bad393308c90a207013e505f659274736638d8c79bac9c67cc3e19'], - }), - ('File::Copy::Recursive::Reduced', '0.006', { - 'source_tmpl': 'File-Copy-Recursive-Reduced-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JK/JKEENAN'], - 'checksums': ['e618f993a69f4355205c58fffff6982609f28b47f646ec6e244e41b5c6707e2c'], - }), - ('Module::Build::XSUtil', '0.19', { - 'source_tmpl': 'Module-Build-XSUtil-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HI/HIDEAKIO'], - 'checksums': ['9063b3c346edeb422807ffe49ffb23038c4f900d4a77b845ce4b53d97bf29400'], - }), - ('Tie::Function', '0.02', { - 'source_tmpl': 
'Tie-Function-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAVIDNICO/handy_tied_functions'], - 'checksums': ['0b1617af218dfab911ba0fbd72210529a246efe140332da77fe3e03d11000117'], - }), - ('Template::Plugin::Number::Format', '1.06', { - 'source_tmpl': 'Template-Plugin-Number-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DARREN'], - 'checksums': ['0865836a1bcbc34d4a0ee34b5ccc14d7b511f1fd300bf390f002dac349539843'], - }), - ('HTML::Parser', '3.72', { - 'source_tmpl': 'HTML-Parser-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['ec28c7e1d9e67c45eca197077f7cdc41ead1bb4c538c7f02a3296a4bb92f608b'], - }), - ('Date::Handler', '1.2', { - 'source_tmpl': 'Date-Handler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BB/BBEAUSEJ'], - 'checksums': ['c36fd2b68d48c2e17417bf2873c78820f3ae02460fdf5976b8eeab887d59e16c'], - }), - ('Params::Util', '1.07', { - 'source_tmpl': 'Params-Util-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AD/ADAMK'], - 'checksums': ['30f1ec3f2cf9ff66ae96f973333f23c5f558915bb6266881eac7423f52d7c76c'], - }), - ('IO::HTML', '1.001', { - 'source_tmpl': 'IO-HTML-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CJ/CJM'], - 'checksums': ['ea78d2d743794adc028bc9589538eb867174b4e165d7d8b5f63486e6b828e7e0'], - }), - ('Data::Grove', '0.08', { - 'source_tmpl': 'libxml-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KM/KMACLEOD'], - 'checksums': ['4571059b7b5d48b7ce52b01389e95d798bf5cf2020523c153ff27b498153c9cb'], - }), - ('Class::ISA', '0.36', { - 'source_tmpl': 'Class-ISA-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SM/SMUELLER'], - 'checksums': ['8816f34e9a38e849a10df756030dccf9fe061a196c11ac3faafd7113c929b964'], - }), - ('URI', '1.76', { - 'source_tmpl': 'URI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['b2c98e1d50d6f572483ee538a6f4ccc8d9185f91f0073fd8af7390898254413e'], - }), - ('Ima::DBI', '0.35', { - 'source_tmpl': 'Ima-DBI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PERRIN'], - 'checksums': ['8b481ceedbf0ae4a83effb80581550008bfdd3885ef01145e3733c7097c00a08'], - }), - ('Tie::IxHash', '1.23', { - 'source_tmpl': 'Tie-IxHash-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CH/CHORNY'], - 'checksums': ['fabb0b8c97e67c9b34b6cc18ed66f6c5e01c55b257dcf007555e0b027d4caf56'], - }), - ('GO', '0.04', { - 'source_tmpl': 'go-db-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SJ/SJCARBON'], - 'checksums': ['8eb73d591ad767e7cf26def40cffd84833875f1ad51e456960b9ed73dc23641b'], - }), - ('Class::DBI::SQLite', '0.11', { - 'source_tmpl': 'Class-DBI-SQLite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['c4661b00afb7e53c97ac36e13f34dde43c1a93540a2f4ff97e6182b0c731e4e7'], - }), - ('Pod::POM', '2.01', { - 'source_tmpl': 'Pod-POM-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['1b50fba9bbdde3ead192beeba0eaddd0c614e3afb1743fa6fff805f57c56f7f4'], - }), - ('Math::Round', '0.07', { - 'source_tmpl': 'Math-Round-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GROMMEL'], - 'checksums': 
['73a7329a86e54a5c29a440382e5803095b58f33129e61a1df0093b4824de9327'], - }), - ('Text::Diff', '1.45', { - 'source_tmpl': 'Text-Diff-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['e8baa07b1b3f53e00af3636898bbf73aec9a0ff38f94536ede1dbe96ef086f04'], - }), - ('Log::Message::Simple', '0.10', { - 'source_tmpl': 'Log-Message-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['aa12d1a4c0ac260b94d448fa01feba242a8a85cb6cbfdc66432e3b5b468add96'], - }), - ('Net::SSLeay', '1.85', { - 'source_tmpl': 'Net-SSLeay-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIKEM'], - 'checksums': ['9d8188b9fb1cae3bd791979c20554925d5e94a138d00414f1a6814549927b0c8'], - }), - ('IO::Socket::SSL', '2.068', { - 'source_tmpl': 'IO-Socket-SSL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SU/SULLR'], - 'checksums': ['4420fc0056f1827b4dd1245eacca0da56e2182b4ef6fc078f107dc43c3fb8ff9'], - }), - ('Fennec::Lite', '0.004', { - 'source_tmpl': 'Fennec-Lite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['dce28e3932762c2ff92aa52d90405c06e898e81cb7b164ccae8966ae77f1dcab'], - }), - ('Sub::Uplevel', '0.2800', { - 'source_tmpl': 'Sub-Uplevel-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['b4f3f63b80f680a421332d8851ddbe5a8e72fcaa74d5d1d98f3c8cc4a3ece293'], - }), - ('Meta::Builder', '0.004', { - 'source_tmpl': 'Meta-Builder-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['acb499aa7206eb9db21eb85357a74521bfe3bdae4a6416d50a7c75b939cf56fe'], - }), - ('Exporter::Declare', '0.114', { - 'source_tmpl': 'Exporter-Declare-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['4bd70d6ca76f6f6ba7e4c618d4ac93b8593a58f1233ccbe18b10f5f204f1d4e4'], - }), - ('Getopt::Long', '2.51', { - 'source_tmpl': 'Getopt-Long-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JV/JV'], - 'checksums': ['20b43b94c2f4096e9e05c213d6184d7391567f127631d69e9b1ffd994d4cc564'], - }), - ('Log::Message', '0.08', { - 'source_tmpl': 'Log-Message-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['bd697dd62aaf26d118e9f0a0813429deb1c544e4501559879b61fcbdfe99fe46'], - }), - ('Mouse', 'v2.5.9', { - 'source_tmpl': 'Mouse-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SK/SKAJI'], - 'checksums': ['b9d78d46ef10b3c9e284a3f81381c91a0f94e7202cb11f514edb1dda4db50c73'], - }), - ('Test::Version', '2.09', { - 'source_tmpl': 'Test-Version-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['9ce1dd2897a5f30e1b7f8966ec66f57d8d8f280f605f28c7ca221fa79aca38e0'], - }), - ('DBIx::Admin::TableInfo', '3.03', { - 'source_tmpl': 'DBIx-Admin-TableInfo-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['a852530f95957a43aa794f2edf5f3fe4ecec35bd20150c38136d4c23d85328b6'], - }), - ('Net::HTTP', '6.19', { - 'source_tmpl': 'Net-HTTP-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['52b76ec13959522cae64d965f15da3d99dcb445eddd85d2ce4e4f4df385b2fc4'], - }), - ('Test::Deep', '1.130', { - 'source_tmpl': 
'Test-Deep-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['4064f494f5f62587d0ae501ca439105821ee5846c687dc6503233f55300a7c56'], - }), - ('Test::Warn', '0.36', { - 'source_tmpl': 'Test-Warn-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BIGJ'], - 'checksums': ['ecbca346d379cef8d3c0e4ac0c8eb3b2613d737ffaaeae52271c38d7bf3c6cda'], - }), - ('MRO::Compat', '0.13', { - 'source_tmpl': 'MRO-Compat-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['8a2c3b6ccc19328d5579d02a7d91285e2afd85d801f49d423a8eb16f323da4f8'], - }), - ('Moo', '2.004000', { - 'source_tmpl': 'Moo-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['323240d000394cf38ec42e865b05cb8928f625c82c9391cd2cdc72b33c51b834'], - }), - ('Clone::Choose', '0.010', { - 'source_tmpl': 'Clone-Choose-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HE/HERMES'], - 'checksums': ['5623481f58cee8edb96cd202aad0df5622d427e5f748b253851dfd62e5123632'], - }), - ('Hash::Merge', '0.300', { - 'source_tmpl': 'Hash-Merge-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['402fd52191d51415bb7163b7673fb4a108e3156493d7df931b8db4b2af757c40'], - }), - ('SQL::Abstract', '1.87', { - 'source_tmpl': 'SQL-Abstract-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IL/ILMARI'], - 'checksums': ['e926a0a83da7efa18e57e5b2952a2ab3b7563a51733fc6dd5c89f12156481c4a'], - }), - ('HTML::Form', '6.03', { - 'source_tmpl': 'HTML-Form-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['68c01d94f005d5ca9c4d55ad2a1bf3a8d034a5fc6db187d91a4c42f3fdc9fc36'], - }), - ('Number::Compare', '0.03', { - 'source_tmpl': 'Number-Compare-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RC/RCLAMP'], - 'checksums': ['83293737e803b43112830443fb5208ec5208a2e6ea512ed54ef8e4dd2b880827'], - }), - ('IPC::Run', '20200505.0', { - 'source_tmpl': 'IPC-Run-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['816ebf217fa0df99c583d73c0acc6ced78ac773787c664c75cbf140bb7e4c901'], - }), - ('HTML::Entities::Interpolate', '1.10', { - 'source_tmpl': 'HTML-Entities-Interpolate-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['f15a9df92c282419f7010964aca1ada844ddfae7afc735cd2ba1bb20883e955c'], - }), - ('File::Remove', '1.58', { - 'source_tmpl': 'File-Remove-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['81f6ec83acab8ba042afe904334a26eb3a56c217bdb9981d237a89ab072fd0d8'], - }), - ('YAML::Tiny', '1.73', { - 'source_tmpl': 'YAML-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['bc315fa12e8f1e3ee5e2f430d90b708a5dc7e47c867dba8dce3a6b8fbe257744'], - }), - ('Module::Install', '1.19', { - 'source_tmpl': 'Module-Install-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['1a53a78ddf3ab9e3c03fc5e354b436319a944cba4281baf0b904fa932a13011b'], - }), - ('Config::Tiny', '2.24', { - 'source_tmpl': 'Config-Tiny-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': 
['1064948e4bc57e86e318dbc8791c53ca5b9d95b958cc474367c3277981135232'], - }), - ('Test::ClassAPI', '1.07', { - 'source_tmpl': 'Test-ClassAPI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['30e9dbfc5e0cc2ee14eae8f3465a908a710daecbd0a3ebdb2888fc4504fa18aa'], - }), - ('Test::Most', '0.37', { - 'source_tmpl': 'Test-Most-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OV/OVID'], - 'checksums': ['533370141eb9f18cf4ac380f6ded2ab57802a6e184008a80fd2304bfcc474fc7'], - }), - ('Class::Accessor', '0.51', { - 'source_tmpl': 'Class-Accessor-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KASEI'], - 'checksums': ['bf12a3e5de5a2c6e8a447b364f4f5a050bf74624c56e315022ae7992ff2f411c'], - }), - ('Test::Differences', '0.67', { - 'source_tmpl': 'Test-Differences-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DC/DCANTRELL'], - 'checksums': ['c88dbbb48b934b069284874f33abbaaa438aa31204aa3fa73bfc2f4aeac878da'], - }), - ('HTTP::Tiny', '0.076', { - 'source_tmpl': 'HTTP-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['ddbdaa2fb511339fa621a80021bf1b9733fddafc4fe0245f26c8b92171ef9387'], - }), - ('Package::DeprecationManager', '0.17', { - 'source_tmpl': 'Package-DeprecationManager-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['1d743ada482b5c9871d894966e87d4c20edc96931bb949fb2638b000ddd6684b'], - }), - ('Digest::SHA1', '2.13', { - 'source_tmpl': 'Digest-SHA1-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['68c1dac2187421f0eb7abf71452a06f190181b8fc4b28ededf5b90296fb943cc'], - }), - ('Date::Language', '2.30', { - 'source_tmpl': 'TimeDate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GB/GBARR'], - 'checksums': ['75bd254871cb5853a6aa0403ac0be270cdd75c9d1b6639f18ecba63c15298e86'], - }), - ('version', '0.9924', { - 'source_tmpl': 'version-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JP/JPEACOCK'], - 'checksums': ['81e4485ff3faf9b7813584d57b557f4b34e73b6c2eb696394f6deefacf5ca65b'], - }), - ('Sub::Uplevel', '0.2800', { - 'source_tmpl': 'Sub-Uplevel-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['b4f3f63b80f680a421332d8851ddbe5a8e72fcaa74d5d1d98f3c8cc4a3ece293'], - }), - ('XML::Bare', '0.53', { - 'source_tmpl': 'XML-Bare-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CO/CODECHILD'], - 'checksums': ['865e198e98d904be1683ef5a53a4948f02dabdacde59fc554a082ffbcc5baefd'], - }), - ('Dist::CheckConflicts', '0.11', { - 'source_tmpl': 'Dist-CheckConflicts-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DO/DOY'], - 'checksums': ['ea844b9686c94d666d9d444321d764490b2cde2f985c4165b4c2c77665caedc4'], - }), - ('Sub::Name', '0.26', { - 'source_tmpl': 'Sub-Name-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['2d2f2d697d516c89547e7c4307f1e79441641cae2c7395e7319b306d390df105'], - }), - ('Time::Piece', '1.3401', { - 'source_tmpl': 'Time-Piece-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ES/ESAYM'], - 'checksums': ['4b55b7bb0eab45cf239a54dfead277dfa06121a43e63b3fce0853aecfdb04c27'], - }), - ('Digest::HMAC', '1.03', { - 'source_tmpl': 
'Digest-HMAC-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['3bc72c6d3ff144d73aefb90e9a78d33612d58cf1cd1631ecfb8985ba96da4a59'], - }), - ('HTTP::Negotiate', '6.01', { - 'source_tmpl': 'HTTP-Negotiate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['1c729c1ea63100e878405cda7d66f9adfd3ed4f1d6cacaca0ee9152df728e016'], - }), - ('MIME::Lite', '3.031', { - 'source_tmpl': 'MIME-Lite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['f1235866482b67f00858b3edaa4ff4cf909ef900f1d15d889948bf9c03a591e0'], - }), - ('Crypt::Rijndael', '1.14', { - 'source_tmpl': 'Crypt-Rijndael-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['6451c3dffe8703523be2bb08d1adca97e77df2a8a4dd46944d18a99330b7850e'], - }), - ('B::Lint', '1.20', { - 'source_tmpl': 'B-Lint-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['dc49408964fd8b7963859c92e013f0b9f92f74be5a7c2a78e3996279827c10b3'], - }), - ('Canary::Stability', '2013', { - 'source_tmpl': 'Canary-Stability-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/ML/MLEHMANN'], - 'checksums': ['a5c91c62cf95fcb868f60eab5c832908f6905221013fea2bce3ff57046d7b6ea'], - }), - ('AnyEvent', '7.17', { - 'source_tmpl': 'AnyEvent-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/ML/MLEHMANN'], - 'checksums': ['50beea689c098fe4aaeb83806c40b9fe7f946d5769acf99f849f099091a4b985'], - }), - ('Object::Accessor', '0.48', { - 'source_tmpl': 'Object-Accessor-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['76cb824a27b6b4e560409fcf6fd5b3bfbbd38b72f1f3d37ed0b54bd9c0baeade'], - }), - ('Data::UUID', '1.224', { - 'source_tmpl': 'Data-UUID-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['c958c17502bbef5eccf40c040874895747f7f9b9f9e5192c18688370981bf47c'], - }), - ('Test::Pod', '1.52', { - 'source_tmpl': 'Test-Pod-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['60a8dbcc60168bf1daa5cc2350236df9343e9878f4ab9830970a5dde6fe8e5fc'], - }), - ('AppConfig', '1.71', { - 'source_tmpl': 'AppConfig-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['1177027025ecb09ee64d9f9f255615c04db5e14f7536c344af632032eb887b0f'], - }), - ('Net::SMTP::SSL', '1.04', { - 'source_tmpl': 'Net-SMTP-SSL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['7b29c45add19d3d5084b751f7ba89a8e40479a446ce21cfd9cc741e558332a00'], - }), - ('XML::Tiny', '2.07', { - 'source_tmpl': 'XML-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DC/DCANTRELL'], - 'checksums': ['ce39fcb53e0fe9f1cbcd86ddf152e1db48566266b70ec0769ef364eeabdd8941'], - }), - ('HTML::Tagset', '3.20', { - 'source_tmpl': 'HTML-Tagset-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PETDANCE'], - 'checksums': ['adb17dac9e36cd011f5243881c9739417fd102fce760f8de4e9be4c7131108e2'], - }), - ('HTML::Tree', '5.07', { - 'source_tmpl': 'HTML-Tree-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KE/KENTNL'], - 'checksums': 
['f0374db84731c204b86c1d5b90975fef0d30a86bd9def919343e554e31a9dbbf'], - }), - ('Devel::GlobalDestruction', '0.14', { - 'source_tmpl': 'Devel-GlobalDestruction-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['34b8a5f29991311468fe6913cadaba75fd5d2b0b3ee3bb41fe5b53efab9154ab'], - }), - ('WWW::RobotRules', '6.02', { - 'source_tmpl': 'WWW-RobotRules-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e'], - }), - ('Expect', '1.35', { - 'source_tmpl': 'Expect-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JA/JACOBY'], - 'checksums': ['09d92761421decd495853103379165a99efbf452c720f30277602cf23679fd06'], - }), - ('Term::UI', '0.46', { - 'source_tmpl': 'Term-UI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['91946c80d7f4aab0ca4bfedc3bbe0a75b37cab1a29bd7bca3b3b7456d417e9a6'], - }), - ('Net::SNMP', 'v6.0.1', { - 'source_tmpl': 'Net-SNMP-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DT/DTOWN'], - 'checksums': ['14c37bc1cbb3f3cdc7d6c13e0f27a859f14cdcfd5ea54a0467a88bc259b0b741'], - }), - ('XML::Filter::BufferText', '1.01', { - 'source_tmpl': 'XML-Filter-BufferText-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RB/RBERJON'], - 'checksums': ['8fd2126d3beec554df852919f4739e689202cbba6a17506e9b66ea165841a75c'], - }), - ('XML::SAX::Writer', '0.57', { - 'source_tmpl': 'XML-SAX-Writer-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PERIGRIN'], - 'checksums': ['3d61d07ef43b0126f5b4de4f415a256fa859fa88dc4fdabaad70b7be7c682cf0'], - }), - ('Statistics::Descriptive', '3.0702', { - 'source_tmpl': 'Statistics-Descriptive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['f98a10c625640170cdda408cccc72bdd7f66f8ebe5f59dec1b96185171ef11d0'], - }), - ('Class::Load', '0.25', { - 'source_tmpl': 'Class-Load-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['2a48fa779b5297e56156380e8b32637c6c58decb4f4a7f3c7350523e11275f8f'], - }), - ('LWP::Simple', '6.46', { - 'source_tmpl': 'libwww-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['7a713375d0e02ad8238a5d58340ade10466fe209f752fd62fd182c173c4423c4'], - }), - ('Time::Piece::MySQL', '0.06', { - 'source_tmpl': 'Time-Piece-MySQL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KASEI'], - 'checksums': ['319601feec17fae344988a5ee91cfc6a0bcfe742af77dba254724c3268b2a60f'], - }), - ('Package::Stash::XS', '0.29', { - 'source_tmpl': 'Package-Stash-XS-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['d3676ba94641e03d6a30e951f09266c4c3ca3f5b58aa7b314a67f28e419878aa'], - }), - ('Set::Array', '0.30', { - 'source_tmpl': 'Set-Array-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['d9f024c8e3637feccdebcf6479b6754b6c92f1209f567feaf0c23818af31ee3c'], - }), - ('boolean', '0.46', { - 'source_tmpl': 'boolean-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IN/INGY'], - 'checksums': ['95c088085c3e83bf680fe6ce16d8264ec26310490f7d1680e416ea7a118f156a'], - }), - ('Number::Format', '1.75', { 
- 'source_tmpl': 'Number-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/W/WR/WRW'], - 'checksums': ['82d659cb16461764fd44d11a9ce9e6a4f5e8767dc1069eb03467c6e55de257f3'], - }), - ('Data::Stag', '0.14', { - 'source_tmpl': 'Data-Stag-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CM/CMUNGALL'], - 'checksums': ['4ab122508d2fb86d171a15f4006e5cf896d5facfa65219c0b243a89906258e59'], - }), - ('Test::NoWarnings', '1.04', { - 'source_tmpl': 'Test-NoWarnings-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AD/ADAMK'], - 'checksums': ['638a57658cb119af1fe5b15e73d47c2544dcfef84af0c6b1b2e97f08202b686c'], - }), - ('Crypt::DES', '2.07', { - 'source_tmpl': 'Crypt-DES-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DP/DPARIS'], - 'checksums': ['2db1ebb5837b4cb20051c0ee5b733b4453e3137df0a92306034c867621edd7e7'], - }), - ('Exporter', '5.74', { - 'source_tmpl': 'Exporter-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['eadb889ef673ad940da6aa4f6f7d75fc1e625ae786ae3533fd313eaf629945b8'], - }), - ('Class::Inspector', '1.36', { - 'source_tmpl': 'Class-Inspector-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['cc295d23a472687c24489d58226ead23b9fdc2588e522f0b5f0747741700694e'], - }), - ('Parse::RecDescent', '1.967015', { - 'source_tmpl': 'Parse-RecDescent-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JT/JTBRAUN'], - 'checksums': ['1943336a4cb54f1788a733f0827c0c55db4310d5eae15e542639c9dd85656e37'], - }), - ('Carp', '1.50', { - 'source_tmpl': 'Carp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/X/XS/XSAWYERX'], - 'checksums': ['f5273b4e1a6d51b22996c48cb3a3cbc72fd456c4038f5c20b127e2d4bcbcebd9'], - }), - ('XML::Parser', '2.46', { - 'source_tmpl': 'XML-Parser-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['d331332491c51cccfb4cb94ffc44f9cd73378e618498d4a37df9e043661c515d'], - }), - ('XML::XPath', '1.44', { - 'source_tmpl': 'XML-XPath-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MANWAR'], - 'checksums': ['1cc9110705165dc09dd09974dd7c0b6709c9351d6b6b1cef5a711055f891dd0f'], - }), - ('JSON', '4.02', { - 'source_tmpl': 'JSON-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI'], - 'checksums': ['444a88755a89ffa2a5424ab4ed1d11dca61808ebef57e81243424619a9e8627c'], - }), - ('Sub::Exporter', '0.987', { - 'source_tmpl': 'Sub-Exporter-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['543cb2e803ab913d44272c7da6a70bb62c19e467f3b12aaac4c9523259b083d6'], - }), - ('Class::Load::XS', '0.10', { - 'source_tmpl': 'Class-Load-XS-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['5bc22cf536ebfd2564c5bdaf42f0d8a4cee3d1930fc8b44b7d4a42038622add1'], - }), - ('Set::IntSpan::Fast', '1.15', { - 'source_tmpl': 'Set-IntSpan-Fast-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AN/ANDYA'], - 'checksums': ['cfb1768c24f55208e87405b17f537f0f303fa141891d0b22d509a941aa57e24e'], - }), - ('Sub::Exporter::Progressive', '0.001013', { - 'source_tmpl': 'Sub-Exporter-Progressive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/F/FR/FREW'], - 'checksums': 
['d535b7954d64da1ac1305b1fadf98202769e3599376854b2ced90c382beac056'], - }), - ('Data::Dumper::Concise', '2.023', { - 'source_tmpl': 'Data-Dumper-Concise-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['a6c22f113caf31137590def1b7028a7e718eface3228272d0672c25e035d5853'], - }), - ('File::Slurp::Tiny', '0.004', { - 'source_tmpl': 'File-Slurp-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['452995beeabf0e923e65fdc627a725dbb12c9e10c00d8018c16d10ba62757f1e'], - }), - ('Algorithm::Diff', '1.1903', { - 'source_tmpl': 'Algorithm-Diff-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TY/TYEMQ'], - 'checksums': ['30e84ac4b31d40b66293f7b1221331c5a50561a39d580d85004d9c1fff991751'], - }), - ('AnyData', '0.12', { - 'source_tmpl': 'AnyData-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['be6a957f04a2feba9b305536b132deceba1f455db295b221a63e75567fadbcfc'], - }), - ('Text::Iconv', '1.7', { - 'source_tmpl': 'Text-Iconv-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MP/MPIOTR'], - 'checksums': ['5b80b7d5e709d34393bcba88971864a17b44a5bf0f9e4bcee383d029e7d2d5c3'], - }), - ('Class::Data::Inheritable', '0.08', { - 'source_tmpl': 'Class-Data-Inheritable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TM/TMTM'], - 'checksums': ['9967feceea15227e442ec818723163eb6d73b8947e31f16ab806f6e2391af14a'], - }), - ('Text::Balanced', '2.03', { - 'source_tmpl': 'Text-Balanced-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHAY'], - 'checksums': ['057753f8f0568b53921f66a60a89c30092b73329bcc61a2c43339ab70c9792c8'], - }), - ('strictures', '2.000006', { - 'source_tmpl': 'strictures-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['09d57974a6d1b2380c802870fed471108f51170da81458e2751859f2714f8d57'], - }), - ('Switch', '2.17', { - 'source_tmpl': 'Switch-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CH/CHORNY'], - 'checksums': ['31354975140fe6235ac130a109496491ad33dd42f9c62189e23f49f75f936d75'], - }), - ('File::Which', '1.23', { - 'source_tmpl': 'File-Which-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['b79dc2244b2d97b6f27167fc3b7799ef61a179040f3abd76ce1e0a3b0bc4e078'], - }), - ('Email::Date::Format', '1.005', { - 'source_tmpl': 'Email-Date-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['579c617e303b9d874411c7b61b46b59d36f815718625074ae6832e7bb9db5104'], - }), - ('Error', '0.17029', { - 'source_tmpl': 'Error-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['1a23f7913032aed6d4b68321373a3899ca66590f4727391a091ec19c95bf7adc'], - }), - ('Mock::Quick', '1.111', { - 'source_tmpl': 'Mock-Quick-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['ff786008bf8c022064ececd3b7ed89c76b35e8d1eac6cf472a9f51771c1c9f2c'], - }), - ('Text::CSV', '2.00', { - 'source_tmpl': 'Text-CSV-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI'], - 'checksums': ['8ccbd9195805222d995844114d0e595bb24ce188f85284dbf256080311cbb2c2'], - }), - ('Test::Output', '1.031', { - 'source_tmpl': 
'Test-Output-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BD/BDFOY'], - 'checksums': ['f8b8f37185717872727d06f6c078fa77db794410faf2f6da4d37b0b7650f7ea4'], - }), - ('Class::DBI', 'v3.0.17', { - 'source_tmpl': 'Class-DBI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TM/TMTM'], - 'checksums': ['541354fe361c56850cb11261f6ca089a14573fa764792447444ff736ae626206'], - }), - ('List::SomeUtils', '0.56', { - 'source_tmpl': 'List-SomeUtils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['eaa7d99ce86380c0389876474c8eb84acc0a6bfeef1b0fc23a292592de6f89f7'], - }), - ('List::UtilsBy', '0.11', { - 'source_tmpl': 'List-UtilsBy-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PEVANS'], - 'checksums': ['faddf43b4bc21db8e4c0e89a26e5f23fe626cde3491ec651b6aa338627f5775a'], - }), - ('List::AllUtils', '0.16', { - 'source_tmpl': 'List-AllUtils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['559b3aa911c73003a3a1ebd860d3b16e171137de8203d86be63a2390364c63dd'], - }), - ('UNIVERSAL::moniker', '0.08', { - 'source_tmpl': 'UNIVERSAL-moniker-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KASEI'], - 'checksums': ['94ce27a546cd57cb52e080a8f2533a7cc2350028388582485bd1039a37871f9c'], - }), - ('Exception::Class', '1.44', { - 'source_tmpl': 'Exception-Class-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['33f3fbf8b138d3b04ea4ec0ba83fb0df6ba898806bcf4ef393d4cafc1a23ee0d'], - }), - ('File::CheckTree', '4.42', { - 'source_tmpl': 'File-CheckTree-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['66fb417f8ff8a5e5b7ea25606156e70e204861c59fa8c3831925b4dd3f155f8a'], - }), - ('Math::VecStat', '0.08', { - 'source_tmpl': 'Math-VecStat-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AS/ASPINELLI'], - 'checksums': ['409a8e0e4b1025c8e80f628f65a9778aa77ab285161406ca4a6c097b13656d0d'], - }), - ('Pod::LaTeX', '0.61', { - 'source_tmpl': 'Pod-LaTeX-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TJ/TJENNESS'], - 'checksums': ['15a840ea1c8a76cd3c865fbbf2fec33b03615c0daa50f9c800c54e0cf0659d46'], - }), - ('Eval::Closure', '0.14', { - 'source_tmpl': 'Eval-Closure-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DO/DOY'], - 'checksums': ['ea0944f2f5ec98d895bef6d503e6e4a376fea6383a6bc64c7670d46ff2218cad'], - }), - ('HTTP::Request', '6.18', { - 'source_tmpl': 'HTTP-Message-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['d060d170d388b694c58c14f4d13ed908a2807f0e581146cef45726641d809112'], - }), - ('XML::Twig', '3.52', { - 'source_tmpl': 'XML-Twig-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIROD'], - 'checksums': ['fef75826c24f2b877d0a0d2645212fc4fb9756ed4d2711614ac15c497e8680ad'], - }), - ('IO::String', '1.08', { - 'source_tmpl': 'IO-String-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['2a3f4ad8442d9070780e58ef43722d19d1ee21a803bf7c8206877a10482de5a0'], - }), - ('XML::Simple', '2.25', { - 'source_tmpl': 'XML-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GRANTM'], - 'checksums': 
['531fddaebea2416743eb5c4fdfab028f502123d9a220405a4100e68fc480dbf8'], - }), - ('Sub::Install', '0.928', { - 'source_tmpl': 'Sub-Install-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['61e567a7679588887b7b86d427bc476ea6d77fffe7e0d17d640f89007d98ef0f'], - }), - ('HTTP::Cookies', '6.08', { - 'source_tmpl': 'HTTP-Cookies-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['49ebb73576eb41063c04bc079477df094496deec805ae033f3be338c23c3af59'], - }), - ('Pod::Plainer', '1.04', { - 'source_tmpl': 'Pod-Plainer-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RM/RMBARKER'], - 'checksums': ['1bbfbf7d1d4871e5a83bab2137e22d089078206815190eb1d5c1260a3499456f'], - }), - ('Test::Exception::LessClever', '0.009', { - 'source_tmpl': 'Test-Exception-LessClever-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['3b2731a44956a11f74b46b3ecf0734fab651e1c0bcf120f8b407aa1b4d43ac34'], - }), - ('LWP::MediaTypes', '6.02', { - 'source_tmpl': 'LWP-MediaTypes-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['18790b0cc5f0a51468495c3847b16738f785a2d460403595001e0b932e5db676'], - }), - ('Scalar::List::Utils', '1.55', { - 'modulename': 'List::Util', - 'source_tmpl': 'Scalar-List-Utils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PEVANS'], - 'checksums': ['4d2bdc1c72a7bc4d69d6a5cc85bc7566497c3b183c6175b832784329d58feb4b'], - }), - ('Data::Section::Simple', '0.07', { - 'source_tmpl': 'Data-Section-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['0b3035ffdb909aa1f7ded6b608fa9d894421c82c097d51e7171170d67579a9cb'], - }), - ('Class::Trigger', '0.15', { - 'source_tmpl': 'Class-Trigger-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['b7a878d44dea67d64df2ca18020d9d868a95596debd16f1a264874209332b07f'], - }), - ('HTTP::Daemon', '6.01', { - 'source_tmpl': 'HTTP-Daemon-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['43fd867742701a3f9fcc7bd59838ab72c6490c0ebaf66901068ec6997514adc2'], - }), - ('File::HomeDir', '1.004', { - 'source_tmpl': 'File-HomeDir-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['45f67e2bb5e60a7970d080e8f02079732e5a8dfc0c7c3cbdb29abfb3f9f791ad'], - }), - ('HTTP::Date', '6.02', { - 'source_tmpl': 'HTTP-Date-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['e8b9941da0f9f0c9c01068401a5e81341f0e3707d1c754f8e11f42a7e629e333'], - }), - ('Authen::SASL', '2.16', { - 'source_tmpl': 'Authen-SASL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GB/GBARR'], - 'checksums': ['6614fa7518f094f853741b63c73f3627168c5d3aca89b1d02b1016dc32854e09'], - }), - ('Clone', '0.42', { - 'source_tmpl': 'Clone-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GARU'], - 'checksums': ['54a930db8f178321cf201da040442d198e8c18a77e7fcabb578e460b6acb07e5'], - }), - ('Data::Types', '0.17', { - 'source_tmpl': 'Data-Types-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MANWAR'], - 'checksums': ['860751feb79b7dfc1af71c4b7fe920220ec6d31c4ab9402b8f178f7f4b8293c1'], - }), - 
('Import::Into', '1.002005', { - 'source_tmpl': 'Import-Into-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['bd9e77a3fb662b40b43b18d3280cd352edf9fad8d94283e518181cc1ce9f0567'], - }), - ('DateTime::Tiny', '1.07', { - 'source_tmpl': 'DateTime-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['83568a22838cb518fbeb9e060460ec7f59d5a0b0a1cc06562954c3674d7cf7e4'], - }), - ('DBD::AnyData', '0.110', { - 'source_tmpl': 'DBD-AnyData-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['247f0d88e55076fd3f6c7bf3fd527989d62fcc1ef9bde9bf2ee11c280adcaeab'], - }), - ('Text::Format', '0.61', { - 'source_tmpl': 'Text-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['bb8a3b8ff515c85101baf553a769337f944a05cde81f111ae78aff416bf4ae2b'], - }), - ('Devel::CheckCompiler', '0.07', { - 'source_tmpl': 'Devel-CheckCompiler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SY/SYOHEX'], - 'checksums': ['768b7697b4b8d4d372c7507b65e9dd26aa4223f7100183bbb4d3af46d43869b5'], - }), - ('Log::Handler', '0.90', { - 'source_tmpl': 'Log-Handler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BL/BLOONIX'], - 'checksums': ['3a5c80e7128454770f83acab8cbd3e70e5ec3d59a61dc32792a178f0b31bf74d'], - }), - ('DBIx::ContextualFetch', '1.03', { - 'source_tmpl': 'DBIx-ContextualFetch-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TM/TMTM'], - 'checksums': ['85e2f805bfc81cd738c294316b27a515397036f397a0ff1c6c8d754c38530306'], - }), - ('Devel::StackTrace', '2.04', { - 'source_tmpl': 'Devel-StackTrace-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['cd3c03ed547d3d42c61fa5814c98296139392e7971c092e09a431f2c9f5d6855'], - }), - ('Term::ReadKey', '2.38', { - 'source_tmpl': 'TermReadKey-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JS/JSTOWE'], - 'checksums': ['5a645878dc570ac33661581fbb090ff24ebce17d43ea53fd22e105a856a47290'], - }), - ('Set::IntSpan', '1.19', { - 'source_tmpl': 'Set-IntSpan-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SW/SWMCD'], - 'checksums': ['11b7549b13ec5d87cc695dd4c777cd02983dd5fe9866012877fb530f48b3dfd0'], - }), - ('Moose', '2.2013', { - 'source_tmpl': 'Moose-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['df74dc78088921178edf72d827017d6c92737c986659f2dadc533ae24675e77c'], - }), - ('Algorithm::Dependency', '1.112', { - 'source_tmpl': 'Algorithm-Dependency-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['7e0fb7c39f56a2dccf9d0295c82f3031ee116e807f6a12a438fa4dd41b0ec187'], - }), - ('Font::TTF', '1.06', { - 'source_tmpl': 'Font-TTF-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BH/BHALLISSY'], - 'checksums': ['4b697d444259759ea02d2c442c9bffe5ffe14c9214084a01f743693a944cc293'], - }), - ('IPC::Run3', '0.048', { - 'source_tmpl': 'IPC-Run3-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['3d81c3cc1b5cff69cca9361e2c6e38df0352251ae7b41e2ff3febc850e463565'], - }), - ('File::Find::Rule', '0.34', { - 'source_tmpl': 'File-Find-Rule-%(version)s.tar.gz', - 'source_urls': 
['https://cpan.metacpan.org/authors/id/R/RC/RCLAMP'], - 'checksums': ['7e6f16cc33eb1f29ff25bee51d513f4b8a84947bbfa18edb2d3cc40a2d64cafe'], - }), - ('SQL::Statement', '1.412', { - 'source_tmpl': 'SQL-Statement-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['65c870883379c11b53f19ead10aaac241ccc86a90bbab77f6376fe750720e5c8'], - }), - ('File::Slurp', '9999.32', { - 'source_tmpl': 'File-Slurp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CA/CAPOEIRAB'], - 'checksums': ['4c3c21992a9d42be3a79dd74a3c83d27d38057269d65509a2f555ea0fb2bc5b0'], - }), - ('Package::Stash', '0.38', { - 'source_tmpl': 'Package-Stash-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['c58ee8844df2dda38e3bf66fdf443439aaefaef1a33940edf2055f0afd223a7f'], - }), - ('Data::OptList', '0.110', { - 'source_tmpl': 'Data-OptList-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['366117cb2966473f2559f2f4575ff6ae69e84c69a0f30a0773e1b51a457ef5c3'], - }), - ('Package::Constants', '0.06', { - 'source_tmpl': 'Package-Constants-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['0b58be78706ccc4e4bd9bbad41767470427fd7b2cfad749489de101f85bc5df5'], - }), - ('CPANPLUS', '0.9908', { - 'source_tmpl': 'CPANPLUS-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['58f6acb47d791ed8e30a6ebcc25089218bacad96e46da8e621a92b778c569dd4'], - }), - ('IO::Tty', '1.14', { - 'source_tmpl': 'IO-Tty-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['51f3e4e311128bdb2c6a15f02c51376cb852ccf9df9bebe8dfbb5f9561eb95b5'], - }), - ('Text::Soundex', '3.05', { - 'source_tmpl': 'Text-Soundex-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['f6dd55b4280b25dea978221839864382560074e1d6933395faee2510c2db60ed'], - }), - ('Lingua::EN::PluralToSingular', '0.21', { - 'source_tmpl': 'Lingua-EN-PluralToSingular-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BK/BKB'], - 'checksums': ['f8a8b7de28c25c96190d7f48c90b5ad9b9bf517f3835c77641f0e8fa546c0d1d'], - }), - ('Want', '0.29', { - 'source_tmpl': 'Want-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RO/ROBIN'], - 'checksums': ['b4e4740b8d4cb783591273c636bd68304892e28d89e88abf9273b1de17f552f7'], - }), - ('Bundle::BioPerl', '2.1.9', { - 'source_tmpl': 'Bundle-BioPerl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CJ/CJFIELDS'], - 'checksums': ['c343ba97f49d86e7fb14aef4cfe3124992e2a5c3168e53a54606dd611d73e5c7'], - }), - ('Mail::Util', '2.21', { - 'source_tmpl': 'MailTools-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MARKOV'], - 'checksums': ['4ad9bd6826b6f03a2727332466b1b7d29890c8d99a32b4b3b0a8d926ee1a44cb'], - }), - ('Text::Template', '1.59', { - 'source_tmpl': 'Text-Template-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MS/MSCHOUT'], - 'checksums': ['1dd2c788c05303ed9a970e1881109642151fa93e02c7a80d4c70608276bab1ee'], - }), - ('PDF::API2', '2.037', { - 'source_tmpl': 'PDF-API2-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SS/SSIMMS'], - 'checksums': ['142803d1886d2a2919d374fb6c25681630aa26740e3f8023337f996fa6c6297e'], 
- }), - ('Devel::CheckLib', '1.14', { - 'source_tmpl': 'Devel-CheckLib-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MATTN'], - 'checksums': ['f21c5e299ad3ce0fdc0cb0f41378dca85a70e8d6c9a7599f0e56a957200ec294'], - }), - ('SVG', '2.85', { - 'source_tmpl': 'SVG-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MANWAR'], - 'checksums': ['159ec81f3236175957c9a4e911cb0e3715dc5b658144c8a5418b772768a1477c'], - }), - ('Statistics::Basic', '1.6611', { - 'source_tmpl': 'Statistics-Basic-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JE/JETTERO'], - 'checksums': ['6855ce5615fd3e1af4cfc451a9bf44ff29a3140b4e7130034f1f0af2511a94fb'], - }), - ('Log::Log4perl', '1.49', { - 'source_tmpl': 'Log-Log4perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MS/MSCHILLI'], - 'checksums': ['b739187f519146cb6bebcfc427c64b1f4138b35c5f4c96f46a21ed4a43872e16'], - }), - ('Math::CDF', '0.1', { - 'source_tmpl': 'Math-CDF-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CA/CALLAHAN'], - 'checksums': ['7896bf250835ce47dcc813cb8cf9dc576c5455de42e822dcd7d8d3fef2125565'], - }), - ('Array::Utils', '0.5', { - 'source_tmpl': 'Array-Utils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/Z/ZM/ZMIJ/Array'], - 'checksums': ['89dd1b7fcd9b4379492a3a77496e39fe6cd379b773fd03a6b160dd26ede63770'], - }), - ('File::Grep', '0.02', { - 'source_tmpl': 'File-Grep-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MN/MNEYLON'], - 'checksums': ['462e15274eb6278521407ea302d9eea7252cd44cab2382871f7de833d5f85632'], - }), - ('File::Path', '2.17', { - 'source_tmpl': 'File-Path-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JK/JKEENAN'], - 'checksums': ['8c506dfd69a70fdd5f1212fe58fbc53620a89a8293e2ac6860570f868269fb31'], - }), - ('File::Slurper', '0.012', { - 'source_tmpl': 'File-Slurper-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['4efb2ea416b110a1bda6f8133549cc6ea3676402e3caf7529fce0313250aa578'], - }), - ('File::Temp', '0.2309', { - 'source_tmpl': 'File-Temp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['01b242a9888d155db981aa0a9891ce2c9e439f0e4bbff4dbf17ca4997be6235f'], - }), - ('Graph', '0.9704', { - 'source_tmpl': 'Graph-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JH/JHI'], - 'checksums': ['325e8eb07be2d09a909e450c13d3a42dcb2a2e96cc3ac780fe4572a0d80b2a25'], - }), - ('Graph::ReadWrite', '2.09', { - 'source_tmpl': 'Graph-ReadWrite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['b01ef06ce922eea12d5ce614d63ddc5f3ee7ad0d05f9577051d3f87a89799a4a'], - }), - ('PerlIO::utf8_strict', '0.007', { - 'source_tmpl': 'PerlIO-utf8_strict-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['83a33f2fe046cb3ad6afc80790635a423e2c7c6854afacc6998cd46951cc81cb'], - }), - ('Devel::OverloadInfo', '0.005', { - 'source_tmpl': 'Devel-OverloadInfo-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IL/ILMARI'], - 'checksums': ['8bfde2ffa47c9946f8adc8cfc445c2f97b8d1cdd678111bee9f444e82f7aa6e7'], - }), - ('Sub::Identify', '0.14', { - 'source_tmpl': 'Sub-Identify-%(version)s.tar.gz', - 'source_urls': 
['https://cpan.metacpan.org/authors/id/R/RG/RGARCIA'], - 'checksums': ['068d272086514dd1e842b6a40b1bedbafee63900e5b08890ef6700039defad6f'], - }), - ('Digest::MD5::File', '0.08', { - 'source_tmpl': 'Digest-MD5-File-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DM/DMUEY'], - 'checksums': ['adb43a54e32627b4f7e57c9640e6eb06d0bb79d8ea54cd0bd79ed35688fb1218'], - }), - ('String::RewritePrefix', '0.008', { - 'source_tmpl': 'String-RewritePrefix-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['e45a31d6914e8f5fc722ef48d8819400dafc02105e0c61414aabbf01bce208eb'], - }), - ('Getopt::Long::Descriptive', '0.105', { - 'source_tmpl': 'Getopt-Long-Descriptive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['a71cdbcf4043588b26a42a13d151c243f6eccf38e8fc0b18ffb5b53651ab8c15'], - }), - ('App::Cmd', '0.331', { - 'source_tmpl': 'App-Cmd-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['4a5d3df0006bd278880d01f4957aaa652a8f91fe8f66e93adf70fba0c3ecb680'], - }), - ('Path::Tiny', '0.108', { - 'source_tmpl': 'Path-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['3c49482be2b3eb7ddd7e73a5b90cff648393f5d5de334ff126ce7a3632723ff5'], - }), - ('Carp::Clan', '6.08', { - 'source_tmpl': 'Carp-Clan-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['c75f92e34422cc5a65ab05d155842b701452434e9aefb649d6e2289c47ef6708'], - }), - ('Sub::Exporter::ForMethods', '0.100052', { - 'source_tmpl': 'Sub-Exporter-ForMethods-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['421fbba4f6ffcf13c4335f2c20630d709e6fa659c07545d094dbc5a558ad3006'], - }), - ('MooseX::Types', '0.50', { - 'source_tmpl': 'MooseX-Types-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['9cd87b3492cbf0be9d2df9317b2adf9fc30663770e69906654bea3f41b17cb08'], - }), - ('B::Hooks::EndOfScope', '0.24', { - 'source_tmpl': 'B-Hooks-EndOfScope-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['03aa3dfe5d0aa6471a96f43fe8318179d19794d4a640708f0288f9216ec7acc6'], - }), - ('namespace::clean', '0.27', { - 'source_tmpl': 'namespace-clean-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RI/RIBASUSHI'], - 'checksums': ['8a10a83c3e183dc78f9e7b7aa4d09b47c11fb4e7d3a33b9a12912fd22e31af9d'], - }), - ('namespace::autoclean', '0.29', { - 'source_tmpl': 'namespace-autoclean-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['45ebd8e64a54a86f88d8e01ae55212967c8aa8fed57e814085def7608ac65804'], - }), - ('File::pushd', '1.016', { - 'source_tmpl': 'File-pushd-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['d73a7f09442983b098260df3df7a832a5f660773a313ca273fa8b56665f97cdc'], - }), - ('MooseX::Types::Perl', '0.101343', { - 'source_tmpl': 'MooseX-Types-Perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['f084beaf3c33209c68d05d4dbc24c25d604a6458b9738d96dceb086c8ef1325a'], - }), - ('Role::Tiny', '2.001004', { - 'source_tmpl': 'Role-Tiny-%(version)s.tar.gz', - 'source_urls': 
['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['92ba5712850a74102c93c942eb6e7f62f7a4f8f483734ed289d08b324c281687'], - }), - ('Specio', '0.46', { - 'source_tmpl': 'Specio-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['0bf42aa116076d6efc18f72b72c7acb5638bd41c0aa09aecc12fc8bf9ceb9596'], - }), - ('Params::ValidationCompiler', '0.30', { - 'source_tmpl': 'Params-ValidationCompiler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['dc5bee23383be42765073db284bed9fbd819d4705ad649c20b644452090d16cb'], - }), - ('Log::Dispatch', '2.70', { - 'source_tmpl': 'Log-Dispatch-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['a3d91cc52467d3a3c6683103f3df4472d71e405a45f553289448713ac4293f21'], - }), - ('String::Flogger', '1.101245', { - 'source_tmpl': 'String-Flogger-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['aa03c08e01f802a358c175c6093c02adf9688659a087a8ddefdc3e9cef72640b'], - }), - ('Log::Dispatchouli', '2.019', { - 'source_tmpl': 'Log-Dispatchouli-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['fe8890c553c8f72b8b55633067270862c34db2ab8ff1cf8db0855f0427c23442'], - }), - ('Data::Section', '0.200007', { - 'source_tmpl': 'Data-Section-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['cd937e5b70e34aab885ff414e2a6d19e4783b7c28fc3cda5145b230514ebb4de'], - }), - ('Software::License', '0.103014', { - 'source_tmpl': 'Software-License-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['eb45ea602d75006683789fbba57a01c0a1f7037371de95ea54b91577535d1789'], - }), - ('MooseX::SetOnce', '0.200002', { - 'source_tmpl': 'MooseX-SetOnce-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['cbed06b7fcd353f0d99bf80a87c1c0b581160697231b3ad9a608da231ba2b659'], - }), - ('Term::Encoding', '0.03', { - 'source_tmpl': 'Term-Encoding-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['95ba9687d735d25a3cbe64508d7894f009c7fa2a1726c3e786e9e21da2251d0b'], - }), - ('Config::MVP', '2.200011', { - 'source_tmpl': 'Config-MVP-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['23c95666fc43c4adaebcc093b1b56091efc2a6aa2d75366a216d18eda96ad716'], - }), - ('Throwable', '0.200013', { - 'source_tmpl': 'Throwable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['9987d0deb5bddd352a6330cefbe932f882e36dd8c8a4564bcfd372dc396b8fa0'], - }), - ('Sub::Quote', '2.006006', { - 'source_tmpl': 'Sub-Quote-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['6e4e2af42388fa6d2609e0e82417de7cc6be47223f576592c656c73c7524d89d'], - }), - ('Role::Identifiable::HasIdent', '0.007', { - 'source_tmpl': 'Role-Identifiable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['561346d1a1a07a45bd851d859a825a7f67925a7a3ba5ba58e0cdad8bb99073ad'], - }), - ('Role::HasMessage', '0.006', { - 'source_tmpl': 'Role-HasMessage-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': 
['f6a6dbe0476ff95ee1ffbef825eb18d9b02b0618deba4686e7c63b99d576d4d3'], - }), - ('MooseX::OneArgNew', '0.005', { - 'source_tmpl': 'MooseX-OneArgNew-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['7e4fcf474ea6c4244f0885f1066729cfdc472fbd7190dd41b4b55bcd67c3103f'], - }), - ('MooseX::Role::Parameterized', '1.11', { - 'source_tmpl': 'MooseX-Role-Parameterized-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['1cfe766c5d7f0ecab57f733dcca430a2a2acd6b995757141b940ade3692bec9e'], - }), - ('MooseX::LazyRequire', '0.11', { - 'source_tmpl': 'MooseX-LazyRequire-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['ef620c1e019daf9cf3f23a943d25a94c91e93ab312bcd63be2e9740ec0b94288'], - }), - ('Mixin::Linewise::Readers', '0.108', { - 'source_tmpl': 'Mixin-Linewise-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['7df20678474c0973930a472b0c55e3f8e85b7790b68ab18ef618f9c453c8aef2'], - }), - ('Config::INI', '0.025', { - 'source_tmpl': 'Config-INI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['628bf76d5b91f89dde22d4813ec033026ebf71b772bb61ccda909da00c869732'], - }), - ('String::Truncate', '1.100602', { - 'source_tmpl': 'String-Truncate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['aaa3d4eec01136921484139133eb75d5c571fe51b0ad329f089e6d469a235f6e'], - }), - ('Pod::Eventual', '0.094001', { - 'source_tmpl': 'Pod-Eventual-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['be9fb8910b108e5d1a66f002b659ad22576e88d779b703dff9d15122c3f80834'], - }), - ('Pod::Elemental', '0.103005', { - 'source_tmpl': 'Pod-Elemental-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['824336ec18326e3b970e7815922b3921b0a821d2ee0e50b0c5b2bc327f99615e'], - }), - ('Pod::Weaver', '4.015', { - 'source_tmpl': 'Pod-Weaver-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['5af25b29a55783e495a9df5ef6293240e2c9ab02764613d79f1ed50b12dec5ae'], - }), - ('Dist::Zilla', '6.015', { - 'source_tmpl': 'Dist-Zilla-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['2288e5819f942c2f7051fae265b8e3084c3ce155d4f409751396784366ab891b'], - }), - ('XML::RegExp', '0.04', { - 'source_tmpl': 'XML-RegExp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TJ/TJMATHER'], - 'checksums': ['df1990096036085c8e2d45904fe180f82bfed40f1a7e05243f334ea10090fc54'], - }), - ('XML::DOM', '1.46', { - 'source_tmpl': 'XML-DOM-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TJ/TJMATHER'], - 'checksums': ['8ba24b0b459b01d6c5e5b0408829c7d5dfe47ff79b3548c813759048099b175e'], - }), - ('Data::Dump', '1.23', { - 'source_tmpl': 'Data-Dump-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['af53b05ef1387b4cab4427e6789179283e4f0da8cf036e8db516ddb344512b65'], - }), - ('File::Next', '1.18', { - 'source_tmpl': 'File-Next-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PETDANCE'], - 'checksums': ['f900cb39505eb6e168a9ca51a10b73f1bbde1914b923a09ecd72d9c02e6ec2ef'], - }), - ('App::cpanminus', 
'1.7044', {
-        'source_tmpl': 'App-cpanminus-%(version)s.tar.gz',
-        'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/'],
-        'checksums': ['9b60767fe40752ef7a9d3f13f19060a63389a5c23acc3e9827e19b75500f81f3'],
-    }),
-    # Below this point are packages added for JSC, not included in the upstream easybuild repositories
-    ('Alien::Base', '2.26', {
-        'source_tmpl': 'Alien-Build-%(version)s.tar.gz',
-        'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'],
-        'checksums': ['b104566f2c82e12913a276f1485270bb5f5b6bcb6ff9dbe3d098e99d8d74cf71'],
-    }),
-    ('File::chdir', '0.1011', {
-        'source_tmpl': 'File-chdir-%(version)s.tar.gz',
-        'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'],
-        'checksums': ['31ebf912df48d5d681def74b9880d78b1f3aca4351a0ed1fe3570b8e03af6c79'],
-    }),
-    ('Alien::Libxml2', '0.16', {
-        'source_tmpl': 'Alien-Libxml2-%(version)s.tar.gz',
-        'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'],
-        'checksums': ['56f06a58054f788dcef8d3b6669fb47d172e9ca0b7a12d0241d9cf7835a53b97'],
-    }),
-    ('XML::LibXML', '2.0205', {
-        'source_tmpl': 'XML-LibXML-%(version)s.tar.gz',
-        'source_urls': ['http://search.cpan.org/CPAN/authors/id/S/SH/SHLOMIF'],
-        'checksums': ['3a25002714b13f192d0baef5dc25ad2fbf09f8ec4ad1f793dec8fe6e2f5b2278'],
-    }),
-    ('Date::Calc', '6.4', {
-        'source_tmpl': 'Date-Calc-%(version)s.tar.gz',
-        'source_urls': ['http://search.cpan.org/CPAN/authors/id/S/ST/STBEY'],
-        'checksums': ['7ce137b2e797b7c0901f3adf1a05a19343356cd1f04676aa1c56a9f624f859ad'],
-    }),
-]
-
-moduleclass = 'lang'
diff --git a/Golden_Repo/p/Perl/Perl-5.32.0-GCCcore-9.3.0.eb b/Golden_Repo/p/Perl/Perl-5.32.0-GCCcore-9.3.0.eb
deleted file mode 100644
index 7f6225dcb00cd4df889901027f505c4cd7660bac..0000000000000000000000000000000000000000
--- a/Golden_Repo/p/Perl/Perl-5.32.0-GCCcore-9.3.0.eb
+++ /dev/null
@@ -1,1513 +0,0 @@
-name = 'Perl'
-version = '5.32.0'
-
-homepage = 'https://www.perl.org/'
-description = """Larry Wall's Practical Extraction and Report Language"""
-
-site_contacts = 'a.strube@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
-toolchainopts = {'pic': True}
-
-source_urls = ['http://www.cpan.org/src/%(version_major)s.0']
-sources = [SOURCELOWER_TAR_GZ]
-checksums = ['efeb1ce1f10824190ad1cadbcccf6fdb8a5d37007d0100d2d9ae5f2b5900c0b4']
-
-builddependencies = [
-    ('binutils', '2.34'),
-    ('pkg-config', '0.29.2'),
-]
-
-dependencies = [
-    ('zlib', '1.2.11'),  # for Net::SSLeay
-    ('expat', '2.2.9'),  # for XML::Parser
-    ('libxml2', '2.9.10'),  # for XML::LibXML
-]
-
-# OpenSSL is required for Net::SSLeay
-osdependencies = ['openssl', ('openssl-devel', 'libssl-dev',
-                              'libopenssl-devel')]
-
-# !! order of extensions is important !!
-# extensions updated on July 24th 2020 -exts_list = [ - ('Config::General', '2.63', { - 'source_tmpl': 'Config-General-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TL/TLINDEN'], - 'checksums': ['0a9bf977b8aabe76343e88095d2296c8a422410fd2a05a1901f2b20e2e1f6fad'], - }), - ('File::Listing', '6.04', { - 'source_tmpl': 'File-Listing-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['1e0050fcd6789a2179ec0db282bf1e90fb92be35d1171588bd9c47d52d959cf5'], - }), - ('ExtUtils::InstallPaths', '0.012', { - 'source_tmpl': 'ExtUtils-InstallPaths-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['84735e3037bab1fdffa3c2508567ad412a785c91599db3c12593a50a1dd434ed'], - }), - ('ExtUtils::Helpers', '0.026', { - 'source_tmpl': 'ExtUtils-Helpers-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['de901b6790a4557cf4ec908149e035783b125bf115eb9640feb1bc1c24c33416'], - }), - ('Test::Harness', '3.43_02', { - 'source_tmpl': 'Test-Harness-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['019be59351a277c483f2bfb872294016c5831b67193825d5519dc773e11cc63e'], - }), - ('ExtUtils::Config', '0.008', { - 'source_tmpl': 'ExtUtils-Config-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['ae5104f634650dce8a79b7ed13fb59d67a39c213a6776cfdaa3ee749e62f1a8c'], - }), - ('Module::Build::Tiny', '0.039', { - 'source_tmpl': 'Module-Build-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['7d580ff6ace0cbe555bf36b86dc8ea232581530cbeaaea09bccb57b55797f11c'], - }), - ('aliased', '0.34', { - 'source_tmpl': 'aliased-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['c350524507cd827fab864e5d4c2cc350b1babaa12fa95aec0ca00843fcc7deeb'], - }), - ('Text::Glob', '0.11', { - 'source_tmpl': 'Text-Glob-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RC/RCLAMP'], - 'checksums': ['069ccd49d3f0a2dedb115f4bdc9fbac07a83592840953d1fcdfc39eb9d305287'], - }), - ('Regexp::Common', '2017060201', { - 'source_tmpl': 'Regexp-Common-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AB/ABIGAIL'], - 'checksums': ['ee07853aee06f310e040b6bf1a0199a18d81896d3219b9b35c9630d0eb69089b'], - }), - ('GO::Utils', '0.15', { - 'source_tmpl': 'go-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CM/CMUNGALL'], - 'checksums': ['423d26155ee85ca51ab2270cee59f4e85b193e57ac3a29aff827298c0a396b12'], - }), - ('Module::Pluggable', '5.2', { - 'source_tmpl': 'Module-Pluggable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SI/SIMONW'], - 'checksums': ['b3f2ad45e4fd10b3fb90d912d78d8b795ab295480db56dc64e86b9fa75c5a6df'], - }), - ('Test::Fatal', '0.014', { - 'source_tmpl': 'Test-Fatal-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['bcdcef5c7b2790a187ebca810b0a08221a63256062cfab3c3b98685d91d1cbb0'], - }), - ('Test::Warnings', '0.030', { - 'source_tmpl': 'Test-Warnings-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['89a4947ddf1564ae01122275584433d7f6c4370370bcf3768922d796956ae24f'], - }), - ('File::ShareDir', '1.116', { - 
'source_tmpl': 'File-ShareDir-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['59d90bfdf98c4656ff4173e62954ea8cf0de66565e35d108ecd7050596cb8328'], - }), - ('File::ShareDir::Install', '0.13', { - 'source_tmpl': 'File-ShareDir-Install-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['45befdf0d95cbefe7c25a1daf293d85f780d6d2576146546e6828aad26e580f9'], - }), - ('DateTime::Locale', '1.26', { - 'source_tmpl': 'DateTime-Locale-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['1e43152a7ca4585a335958da0dd5e430b95a820f6de6456df26e7ec2a6649792'], - }), - ('DateTime::TimeZone', '2.39', { - 'source_tmpl': 'DateTime-TimeZone-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['65a49083bf465b42c6a65df575efaceb87b5ba5a997d4e91e6ddba57190c8fca'], - }), - ('Test::Requires', '0.11', { - 'source_tmpl': 'Test-Requires-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TOKUHIROM'], - 'checksums': ['4b88de549597eecddf7c3c38a4d0204a16f59ad804577b671896ac04e24e040f'], - }), - ('Module::Implementation', '0.09', { - 'source_tmpl': 'Module-Implementation-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['c15f1a12f0c2130c9efff3c2e1afe5887b08ccd033bd132186d1e7d5087fd66d'], - }), - ('Module::Build', '0.4231', { - 'source_tmpl': 'Module-Build-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['7e0f4c692c1740c1ac84ea14d7ea3d8bc798b2fb26c09877229e04f430b2b717'], - }), - ('Module::Runtime', '0.016', { - 'source_tmpl': 'Module-Runtime-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/Z/ZE/ZEFRAM'], - 'checksums': ['68302ec646833547d410be28e09676db75006f4aa58a11f3bdb44ffe99f0f024'], - }), - ('Try::Tiny', '0.30', { - 'source_tmpl': 'Try-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['da5bd0d5c903519bbf10bb9ba0cb7bcac0563882bcfe4503aee3fb143eddef6b'], - }), - ('Params::Validate', '1.29', { - 'source_tmpl': 'Params-Validate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['49a68dfb430bea028042479111d19068e08095e5a467e320b7ab7bde3d729733'], - }), - ('List::MoreUtils', '0.428', { - 'source_tmpl': 'List-MoreUtils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['713e0945d5f16e62d81d5f3da2b6a7b14a4ce439f6d3a7de74df1fd166476cc2'], - }), - ('Exporter::Tiny', '1.002002', { - 'source_tmpl': 'Exporter-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TOBYINK'], - 'checksums': ['00f0b95716b18157132c6c118ded8ba31392563d19e490433e9a65382e707101'], - }), - ('Class::Singleton', '1.5', { - 'source_tmpl': 'Class-Singleton-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHAY'], - 'checksums': ['38220d04f02e3a803193c2575a1644cce0b95ad4b95c19eb932b94e2647ef678'], - }), - ('DateTime', '1.52', { - 'source_tmpl': 'DateTime-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['67828b6172b23d9534b8f2a593b05caa6240737a3b2246f3e1fe4e015dbc3dfc'], - }), - ('File::Find::Rule::Perl', '1.15', { - 'source_tmpl': 
'File-Find-Rule-Perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['9a48433f86e08ce18e03526e2982de52162eb909d19735460f07eefcaf463ea6'], - }), - ('Readonly', '2.05', { - 'source_tmpl': 'Readonly-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SA/SANKO'], - 'checksums': ['4b23542491af010d44a5c7c861244738acc74ababae6b8838d354dfb19462b5e'], - }), - ('Git', '0.42', { - 'source_tmpl': 'Git-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MS/MSOUTH'], - 'checksums': ['9469a9f398f3a2bf2b0500566ee41d3ff6fae460412a137185767a1cc4783a6d'], - }), - ('Tree::DAG_Node', '1.31', { - 'source_tmpl': 'Tree-DAG_Node-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['1c8ba69772568b3758054247097512c550efe31517c329fb65eef7afccc9d304'], - }), - ('Template', '3.009', { - 'source_tmpl': 'Template-Toolkit-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AT/ATOOMIC'], - 'checksums': ['d6ad23bbf637a59b5dfd1ac006460dfcb185982e4852cde77150fbd085f1f5b6'], - }), - ('DBI', '1.643', { - 'source_tmpl': 'DBI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TI/TIMB'], - 'checksums': ['8a2b993db560a2c373c174ee976a51027dd780ec766ae17620c20393d2e836fa'], - }), - ('DBD::SQLite', '1.65_02', { - 'source_tmpl': 'DBD-SQLite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI'], - 'checksums': ['f4139bebc246943f7b5504b03996b8d367e788410c55f7b03238889244d56da0'], - }), - ('Math::Bezier', '0.01', { - 'source_tmpl': 'Math-Bezier-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AB/ABW'], - 'checksums': ['11a815fc45fdf0efabb1822ab77faad8b9eea162572c5f0940c8ed7d56e6b8b8'], - }), - ('Archive::Extract', '0.86', { - 'source_tmpl': 'Archive-Extract-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['9acd09cdb8e8cf0b6d08210a3b80342300c89a359855319bf6b00c14c4aab687'], - }), - ('DBIx::Simple', '1.37', { - 'source_tmpl': 'DBIx-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JU/JUERD'], - 'checksums': ['46d311aa2ce08907401c56119658426dbb044c5a40de73d9a7b79bf50390cae3'], - }), - ('Shell', '0.73', { - 'source_tmpl': 'Shell-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/F/FE/FERREIRA'], - 'checksums': ['f7dbebf65261ed0e5abd0f57052b64d665a1a830bab4c8bbc220f235bd39caf5'], - }), - ('File::Spec', '3.75', { - 'source_tmpl': 'PathTools-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/X/XS/XSAWYERX'], - 'checksums': ['a558503aa6b1f8c727c0073339081a77888606aa701ada1ad62dd9d8c3f945a2'], - }), - ('Test::Simple', '1.302175', { - 'source_tmpl': 'Test-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['c8c8f5c51ad6d7a858c3b61b8b658d8e789d3da5d300065df0633875b0075e49'], - }), - ('Set::Scalar', '1.29', { - 'source_tmpl': 'Set-Scalar-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAVIDO'], - 'checksums': ['a3dc1526f3dde72d3c64ea00007b86ce608cdcd93567cf6e6e42dc10fdc4511d'], - }), - ('IO::Stringy', '2.111', { - 'source_tmpl': 'IO-stringy-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DS/DSKOLL'], - 'checksums': ['8c67fd6608c3c4e74f7324f1404a856c331dbf48d9deda6aaa8296ea41bf199d'], - }), - 
('Encode::Locale', '1.05', { - 'source_tmpl': 'Encode-Locale-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['176fa02771f542a4efb1dbc2a4c928e8f4391bf4078473bd6040d8f11adb0ec1'], - }), - ('XML::SAX::Base', '1.09', { - 'source_tmpl': 'XML-SAX-Base-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GRANTM'], - 'checksums': ['66cb355ba4ef47c10ca738bd35999723644386ac853abbeb5132841f5e8a2ad0'], - }), - ('XML::NamespaceSupport', '1.12_9', { - 'source_tmpl': 'XML-NamespaceSupport-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PERIGRIN'], - 'checksums': ['2e84a057f0a8c845a612d212742cb94fca4fc8a433150b5721bd448f77d1e4a9'], - }), - ('XML::SAX', '1.02', { - 'source_tmpl': 'XML-SAX-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GRANTM'], - 'checksums': ['4506c387043aa6a77b455f00f57409f3720aa7e553495ab2535263b4ed1ea12a'], - }), - ('Test::LeakTrace', '0.16', { - 'source_tmpl': 'Test-LeakTrace-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEEJO'], - 'checksums': ['5f089eed915f1ec8c743f6d2777c3ecd0ca01df2f7b9e10038d316952583e403'], - }), - ('Test::Exception', '0.43', { - 'source_tmpl': 'Test-Exception-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['156b13f07764f766d8b45a43728f2439af81a3512625438deab783b7883eb533'], - }), - ('Text::Aligner', '0.16', { - 'source_tmpl': 'Text-Aligner-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['5c857dbce586f57fa3d7c4ebd320023ab3b2963b2049428ae01bd3bc4f215725'], - }), - ('Text::Table', '1.134', { - 'source_tmpl': 'Text-Table-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['bacf429b18b7c0b22c088219055063e3902749531d488ebd7b17eab7757cd10b'], - }), - ('MIME::Types', '2.17', { - 'source_tmpl': 'MIME-Types-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MARKOV'], - 'checksums': ['e04ed7d42f1ff3150a303805f2689c28f80b92c511784d4641cb7f040d3e8ff6'], - }), - ('File::Copy::Recursive', '0.45', { - 'source_tmpl': 'File-Copy-Recursive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DM/DMUEY'], - 'checksums': ['d3971cf78a8345e38042b208bb7b39cb695080386af629f4a04ffd6549df1157'], - }), - ('Cwd::Guard', '0.05', { - 'source_tmpl': 'Cwd-Guard-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KAZEBURO'], - 'checksums': ['7afc7ca2b9502e440241938ad97a3e7ebd550180ebd6142e1db394186b268e77'], - }), - ('Capture::Tiny', '0.48', { - 'source_tmpl': 'Capture-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['6c23113e87bad393308c90a207013e505f659274736638d8c79bac9c67cc3e19'], - }), - ('File::Copy::Recursive::Reduced', '0.006', { - 'source_tmpl': 'File-Copy-Recursive-Reduced-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JK/JKEENAN'], - 'checksums': ['e618f993a69f4355205c58fffff6982609f28b47f646ec6e244e41b5c6707e2c'], - }), - ('Module::Build::XSUtil', '0.19', { - 'source_tmpl': 'Module-Build-XSUtil-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HI/HIDEAKIO'], - 'checksums': ['9063b3c346edeb422807ffe49ffb23038c4f900d4a77b845ce4b53d97bf29400'], - }), - ('Tie::Function', '0.02', { - 'source_tmpl': 
'Tie-Function-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAVIDNICO/handy_tied_functions'], - 'checksums': ['0b1617af218dfab911ba0fbd72210529a246efe140332da77fe3e03d11000117'], - }), - ('Template::Plugin::Number::Format', '1.06', { - 'source_tmpl': 'Template-Plugin-Number-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DARREN'], - 'checksums': ['0865836a1bcbc34d4a0ee34b5ccc14d7b511f1fd300bf390f002dac349539843'], - }), - ('HTML::Parser', '3.72', { - 'source_tmpl': 'HTML-Parser-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['ec28c7e1d9e67c45eca197077f7cdc41ead1bb4c538c7f02a3296a4bb92f608b'], - }), - ('Date::Handler', '1.2', { - 'source_tmpl': 'Date-Handler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BB/BBEAUSEJ'], - 'checksums': ['c36fd2b68d48c2e17417bf2873c78820f3ae02460fdf5976b8eeab887d59e16c'], - }), - ('Params::Util', '1.07', { - 'source_tmpl': 'Params-Util-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AD/ADAMK'], - 'checksums': ['30f1ec3f2cf9ff66ae96f973333f23c5f558915bb6266881eac7423f52d7c76c'], - }), - ('IO::HTML', '1.001', { - 'source_tmpl': 'IO-HTML-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CJ/CJM'], - 'checksums': ['ea78d2d743794adc028bc9589538eb867174b4e165d7d8b5f63486e6b828e7e0'], - }), - ('Data::Grove', '0.08', { - 'source_tmpl': 'libxml-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KM/KMACLEOD'], - 'checksums': ['4571059b7b5d48b7ce52b01389e95d798bf5cf2020523c153ff27b498153c9cb'], - }), - ('Class::ISA', '0.36', { - 'source_tmpl': 'Class-ISA-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SM/SMUELLER'], - 'checksums': ['8816f34e9a38e849a10df756030dccf9fe061a196c11ac3faafd7113c929b964'], - }), - ('URI', '1.76', { - 'source_tmpl': 'URI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['b2c98e1d50d6f572483ee538a6f4ccc8d9185f91f0073fd8af7390898254413e'], - }), - ('Ima::DBI', '0.35', { - 'source_tmpl': 'Ima-DBI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PERRIN'], - 'checksums': ['8b481ceedbf0ae4a83effb80581550008bfdd3885ef01145e3733c7097c00a08'], - }), - ('Tie::IxHash', '1.23', { - 'source_tmpl': 'Tie-IxHash-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CH/CHORNY'], - 'checksums': ['fabb0b8c97e67c9b34b6cc18ed66f6c5e01c55b257dcf007555e0b027d4caf56'], - }), - ('GO', '0.04', { - 'source_tmpl': 'go-db-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SJ/SJCARBON'], - 'checksums': ['8eb73d591ad767e7cf26def40cffd84833875f1ad51e456960b9ed73dc23641b'], - }), - ('Class::DBI::SQLite', '0.11', { - 'source_tmpl': 'Class-DBI-SQLite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['c4661b00afb7e53c97ac36e13f34dde43c1a93540a2f4ff97e6182b0c731e4e7'], - }), - ('Pod::POM', '2.01', { - 'source_tmpl': 'Pod-POM-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['1b50fba9bbdde3ead192beeba0eaddd0c614e3afb1743fa6fff805f57c56f7f4'], - }), - ('Math::Round', '0.07', { - 'source_tmpl': 'Math-Round-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GROMMEL'], - 'checksums': 
['73a7329a86e54a5c29a440382e5803095b58f33129e61a1df0093b4824de9327'], - }), - ('Text::Diff', '1.45', { - 'source_tmpl': 'Text-Diff-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['e8baa07b1b3f53e00af3636898bbf73aec9a0ff38f94536ede1dbe96ef086f04'], - }), - ('Log::Message::Simple', '0.10', { - 'source_tmpl': 'Log-Message-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['aa12d1a4c0ac260b94d448fa01feba242a8a85cb6cbfdc66432e3b5b468add96'], - }), - ('Net::SSLeay', '1.85', { - 'source_tmpl': 'Net-SSLeay-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIKEM'], - 'checksums': ['9d8188b9fb1cae3bd791979c20554925d5e94a138d00414f1a6814549927b0c8'], - }), - ('IO::Socket::SSL', '2.068', { - 'source_tmpl': 'IO-Socket-SSL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SU/SULLR'], - 'checksums': ['4420fc0056f1827b4dd1245eacca0da56e2182b4ef6fc078f107dc43c3fb8ff9'], - }), - ('Fennec::Lite', '0.004', { - 'source_tmpl': 'Fennec-Lite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['dce28e3932762c2ff92aa52d90405c06e898e81cb7b164ccae8966ae77f1dcab'], - }), - ('Sub::Uplevel', '0.2800', { - 'source_tmpl': 'Sub-Uplevel-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['b4f3f63b80f680a421332d8851ddbe5a8e72fcaa74d5d1d98f3c8cc4a3ece293'], - }), - ('Meta::Builder', '0.004', { - 'source_tmpl': 'Meta-Builder-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['acb499aa7206eb9db21eb85357a74521bfe3bdae4a6416d50a7c75b939cf56fe'], - }), - ('Exporter::Declare', '0.114', { - 'source_tmpl': 'Exporter-Declare-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['4bd70d6ca76f6f6ba7e4c618d4ac93b8593a58f1233ccbe18b10f5f204f1d4e4'], - }), - ('Getopt::Long', '2.51', { - 'source_tmpl': 'Getopt-Long-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JV/JV'], - 'checksums': ['20b43b94c2f4096e9e05c213d6184d7391567f127631d69e9b1ffd994d4cc564'], - }), - ('Log::Message', '0.08', { - 'source_tmpl': 'Log-Message-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['bd697dd62aaf26d118e9f0a0813429deb1c544e4501559879b61fcbdfe99fe46'], - }), - ('Mouse', 'v2.5.9', { - 'source_tmpl': 'Mouse-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SK/SKAJI'], - 'checksums': ['b9d78d46ef10b3c9e284a3f81381c91a0f94e7202cb11f514edb1dda4db50c73'], - }), - ('Test::Version', '2.09', { - 'source_tmpl': 'Test-Version-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['9ce1dd2897a5f30e1b7f8966ec66f57d8d8f280f605f28c7ca221fa79aca38e0'], - }), - ('DBIx::Admin::TableInfo', '3.03', { - 'source_tmpl': 'DBIx-Admin-TableInfo-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['a852530f95957a43aa794f2edf5f3fe4ecec35bd20150c38136d4c23d85328b6'], - }), - ('Net::HTTP', '6.19', { - 'source_tmpl': 'Net-HTTP-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['52b76ec13959522cae64d965f15da3d99dcb445eddd85d2ce4e4f4df385b2fc4'], - }), - ('Test::Deep', '1.130', { - 'source_tmpl': 
'Test-Deep-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['4064f494f5f62587d0ae501ca439105821ee5846c687dc6503233f55300a7c56'], - }), - ('Test::Warn', '0.36', { - 'source_tmpl': 'Test-Warn-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BIGJ'], - 'checksums': ['ecbca346d379cef8d3c0e4ac0c8eb3b2613d737ffaaeae52271c38d7bf3c6cda'], - }), - ('MRO::Compat', '0.13', { - 'source_tmpl': 'MRO-Compat-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['8a2c3b6ccc19328d5579d02a7d91285e2afd85d801f49d423a8eb16f323da4f8'], - }), - ('Moo', '2.004000', { - 'source_tmpl': 'Moo-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['323240d000394cf38ec42e865b05cb8928f625c82c9391cd2cdc72b33c51b834'], - }), - ('Clone::Choose', '0.010', { - 'source_tmpl': 'Clone-Choose-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HE/HERMES'], - 'checksums': ['5623481f58cee8edb96cd202aad0df5622d427e5f748b253851dfd62e5123632'], - }), - ('Hash::Merge', '0.300', { - 'source_tmpl': 'Hash-Merge-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['402fd52191d51415bb7163b7673fb4a108e3156493d7df931b8db4b2af757c40'], - }), - ('SQL::Abstract', '1.87', { - 'source_tmpl': 'SQL-Abstract-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IL/ILMARI'], - 'checksums': ['e926a0a83da7efa18e57e5b2952a2ab3b7563a51733fc6dd5c89f12156481c4a'], - }), - ('HTML::Form', '6.03', { - 'source_tmpl': 'HTML-Form-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['68c01d94f005d5ca9c4d55ad2a1bf3a8d034a5fc6db187d91a4c42f3fdc9fc36'], - }), - ('Number::Compare', '0.03', { - 'source_tmpl': 'Number-Compare-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RC/RCLAMP'], - 'checksums': ['83293737e803b43112830443fb5208ec5208a2e6ea512ed54ef8e4dd2b880827'], - }), - ('IPC::Run', '20200505.0', { - 'source_tmpl': 'IPC-Run-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['816ebf217fa0df99c583d73c0acc6ced78ac773787c664c75cbf140bb7e4c901'], - }), - ('HTML::Entities::Interpolate', '1.10', { - 'source_tmpl': 'HTML-Entities-Interpolate-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['f15a9df92c282419f7010964aca1ada844ddfae7afc735cd2ba1bb20883e955c'], - }), - ('File::Remove', '1.58', { - 'source_tmpl': 'File-Remove-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['81f6ec83acab8ba042afe904334a26eb3a56c217bdb9981d237a89ab072fd0d8'], - }), - ('YAML::Tiny', '1.73', { - 'source_tmpl': 'YAML-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['bc315fa12e8f1e3ee5e2f430d90b708a5dc7e47c867dba8dce3a6b8fbe257744'], - }), - ('Module::Install', '1.19', { - 'source_tmpl': 'Module-Install-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['1a53a78ddf3ab9e3c03fc5e354b436319a944cba4281baf0b904fa932a13011b'], - }), - ('Config::Tiny', '2.24', { - 'source_tmpl': 'Config-Tiny-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': 
['1064948e4bc57e86e318dbc8791c53ca5b9d95b958cc474367c3277981135232'], - }), - ('Test::ClassAPI', '1.07', { - 'source_tmpl': 'Test-ClassAPI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['30e9dbfc5e0cc2ee14eae8f3465a908a710daecbd0a3ebdb2888fc4504fa18aa'], - }), - ('Test::Most', '0.37', { - 'source_tmpl': 'Test-Most-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OV/OVID'], - 'checksums': ['533370141eb9f18cf4ac380f6ded2ab57802a6e184008a80fd2304bfcc474fc7'], - }), - ('Class::Accessor', '0.51', { - 'source_tmpl': 'Class-Accessor-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KASEI'], - 'checksums': ['bf12a3e5de5a2c6e8a447b364f4f5a050bf74624c56e315022ae7992ff2f411c'], - }), - ('Test::Differences', '0.67', { - 'source_tmpl': 'Test-Differences-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DC/DCANTRELL'], - 'checksums': ['c88dbbb48b934b069284874f33abbaaa438aa31204aa3fa73bfc2f4aeac878da'], - }), - ('HTTP::Tiny', '0.076', { - 'source_tmpl': 'HTTP-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['ddbdaa2fb511339fa621a80021bf1b9733fddafc4fe0245f26c8b92171ef9387'], - }), - ('Package::DeprecationManager', '0.17', { - 'source_tmpl': 'Package-DeprecationManager-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['1d743ada482b5c9871d894966e87d4c20edc96931bb949fb2638b000ddd6684b'], - }), - ('Digest::SHA1', '2.13', { - 'source_tmpl': 'Digest-SHA1-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['68c1dac2187421f0eb7abf71452a06f190181b8fc4b28ededf5b90296fb943cc'], - }), - ('Date::Language', '2.30', { - 'source_tmpl': 'TimeDate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GB/GBARR'], - 'checksums': ['75bd254871cb5853a6aa0403ac0be270cdd75c9d1b6639f18ecba63c15298e86'], - }), - ('version', '0.9924', { - 'source_tmpl': 'version-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JP/JPEACOCK'], - 'checksums': ['81e4485ff3faf9b7813584d57b557f4b34e73b6c2eb696394f6deefacf5ca65b'], - }), - ('Sub::Uplevel', '0.2800', { - 'source_tmpl': 'Sub-Uplevel-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['b4f3f63b80f680a421332d8851ddbe5a8e72fcaa74d5d1d98f3c8cc4a3ece293'], - }), - ('XML::Bare', '0.53', { - 'source_tmpl': 'XML-Bare-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CO/CODECHILD'], - 'checksums': ['865e198e98d904be1683ef5a53a4948f02dabdacde59fc554a082ffbcc5baefd'], - }), - ('Dist::CheckConflicts', '0.11', { - 'source_tmpl': 'Dist-CheckConflicts-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DO/DOY'], - 'checksums': ['ea844b9686c94d666d9d444321d764490b2cde2f985c4165b4c2c77665caedc4'], - }), - ('Sub::Name', '0.26', { - 'source_tmpl': 'Sub-Name-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['2d2f2d697d516c89547e7c4307f1e79441641cae2c7395e7319b306d390df105'], - }), - ('Time::Piece', '1.3401', { - 'source_tmpl': 'Time-Piece-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ES/ESAYM'], - 'checksums': ['4b55b7bb0eab45cf239a54dfead277dfa06121a43e63b3fce0853aecfdb04c27'], - }), - ('Digest::HMAC', '1.03', { - 'source_tmpl': 
'Digest-HMAC-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['3bc72c6d3ff144d73aefb90e9a78d33612d58cf1cd1631ecfb8985ba96da4a59'], - }), - ('HTTP::Negotiate', '6.01', { - 'source_tmpl': 'HTTP-Negotiate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['1c729c1ea63100e878405cda7d66f9adfd3ed4f1d6cacaca0ee9152df728e016'], - }), - ('MIME::Lite', '3.031', { - 'source_tmpl': 'MIME-Lite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['f1235866482b67f00858b3edaa4ff4cf909ef900f1d15d889948bf9c03a591e0'], - }), - ('Crypt::Rijndael', '1.14', { - 'source_tmpl': 'Crypt-Rijndael-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['6451c3dffe8703523be2bb08d1adca97e77df2a8a4dd46944d18a99330b7850e'], - }), - ('B::Lint', '1.20', { - 'source_tmpl': 'B-Lint-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['dc49408964fd8b7963859c92e013f0b9f92f74be5a7c2a78e3996279827c10b3'], - }), - ('Canary::Stability', '2013', { - 'source_tmpl': 'Canary-Stability-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/ML/MLEHMANN'], - 'checksums': ['a5c91c62cf95fcb868f60eab5c832908f6905221013fea2bce3ff57046d7b6ea'], - }), - ('AnyEvent', '7.17', { - 'source_tmpl': 'AnyEvent-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/ML/MLEHMANN'], - 'checksums': ['50beea689c098fe4aaeb83806c40b9fe7f946d5769acf99f849f099091a4b985'], - }), - ('Object::Accessor', '0.48', { - 'source_tmpl': 'Object-Accessor-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['76cb824a27b6b4e560409fcf6fd5b3bfbbd38b72f1f3d37ed0b54bd9c0baeade'], - }), - ('Data::UUID', '1.224', { - 'source_tmpl': 'Data-UUID-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['c958c17502bbef5eccf40c040874895747f7f9b9f9e5192c18688370981bf47c'], - }), - ('Test::Pod', '1.52', { - 'source_tmpl': 'Test-Pod-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['60a8dbcc60168bf1daa5cc2350236df9343e9878f4ab9830970a5dde6fe8e5fc'], - }), - ('AppConfig', '1.71', { - 'source_tmpl': 'AppConfig-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['1177027025ecb09ee64d9f9f255615c04db5e14f7536c344af632032eb887b0f'], - }), - ('Net::SMTP::SSL', '1.04', { - 'source_tmpl': 'Net-SMTP-SSL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['7b29c45add19d3d5084b751f7ba89a8e40479a446ce21cfd9cc741e558332a00'], - }), - ('XML::Tiny', '2.07', { - 'source_tmpl': 'XML-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DC/DCANTRELL'], - 'checksums': ['ce39fcb53e0fe9f1cbcd86ddf152e1db48566266b70ec0769ef364eeabdd8941'], - }), - ('HTML::Tagset', '3.20', { - 'source_tmpl': 'HTML-Tagset-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PETDANCE'], - 'checksums': ['adb17dac9e36cd011f5243881c9739417fd102fce760f8de4e9be4c7131108e2'], - }), - ('HTML::Tree', '5.07', { - 'source_tmpl': 'HTML-Tree-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KE/KENTNL'], - 'checksums': 
['f0374db84731c204b86c1d5b90975fef0d30a86bd9def919343e554e31a9dbbf'], - }), - ('Devel::GlobalDestruction', '0.14', { - 'source_tmpl': 'Devel-GlobalDestruction-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['34b8a5f29991311468fe6913cadaba75fd5d2b0b3ee3bb41fe5b53efab9154ab'], - }), - ('WWW::RobotRules', '6.02', { - 'source_tmpl': 'WWW-RobotRules-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e'], - }), - ('Expect', '1.35', { - 'source_tmpl': 'Expect-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JA/JACOBY'], - 'checksums': ['09d92761421decd495853103379165a99efbf452c720f30277602cf23679fd06'], - }), - ('Term::UI', '0.46', { - 'source_tmpl': 'Term-UI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['91946c80d7f4aab0ca4bfedc3bbe0a75b37cab1a29bd7bca3b3b7456d417e9a6'], - }), - ('Net::SNMP', 'v6.0.1', { - 'source_tmpl': 'Net-SNMP-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DT/DTOWN'], - 'checksums': ['14c37bc1cbb3f3cdc7d6c13e0f27a859f14cdcfd5ea54a0467a88bc259b0b741'], - }), - ('XML::Filter::BufferText', '1.01', { - 'source_tmpl': 'XML-Filter-BufferText-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RB/RBERJON'], - 'checksums': ['8fd2126d3beec554df852919f4739e689202cbba6a17506e9b66ea165841a75c'], - }), - ('XML::SAX::Writer', '0.57', { - 'source_tmpl': 'XML-SAX-Writer-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PERIGRIN'], - 'checksums': ['3d61d07ef43b0126f5b4de4f415a256fa859fa88dc4fdabaad70b7be7c682cf0'], - }), - ('Statistics::Descriptive', '3.0702', { - 'source_tmpl': 'Statistics-Descriptive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['f98a10c625640170cdda408cccc72bdd7f66f8ebe5f59dec1b96185171ef11d0'], - }), - ('Class::Load', '0.25', { - 'source_tmpl': 'Class-Load-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['2a48fa779b5297e56156380e8b32637c6c58decb4f4a7f3c7350523e11275f8f'], - }), - ('LWP::Simple', '6.46', { - 'source_tmpl': 'libwww-perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['7a713375d0e02ad8238a5d58340ade10466fe209f752fd62fd182c173c4423c4'], - }), - ('Time::Piece::MySQL', '0.06', { - 'source_tmpl': 'Time-Piece-MySQL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KASEI'], - 'checksums': ['319601feec17fae344988a5ee91cfc6a0bcfe742af77dba254724c3268b2a60f'], - }), - ('Package::Stash::XS', '0.29', { - 'source_tmpl': 'Package-Stash-XS-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['d3676ba94641e03d6a30e951f09266c4c3ca3f5b58aa7b314a67f28e419878aa'], - }), - ('Set::Array', '0.30', { - 'source_tmpl': 'Set-Array-%(version)s.tgz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RS/RSAVAGE'], - 'checksums': ['d9f024c8e3637feccdebcf6479b6754b6c92f1209f567feaf0c23818af31ee3c'], - }), - ('boolean', '0.46', { - 'source_tmpl': 'boolean-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IN/INGY'], - 'checksums': ['95c088085c3e83bf680fe6ce16d8264ec26310490f7d1680e416ea7a118f156a'], - }), - ('Number::Format', '1.75', { 
- 'source_tmpl': 'Number-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/W/WR/WRW'], - 'checksums': ['82d659cb16461764fd44d11a9ce9e6a4f5e8767dc1069eb03467c6e55de257f3'], - }), - ('Data::Stag', '0.14', { - 'source_tmpl': 'Data-Stag-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CM/CMUNGALL'], - 'checksums': ['4ab122508d2fb86d171a15f4006e5cf896d5facfa65219c0b243a89906258e59'], - }), - ('Test::NoWarnings', '1.04', { - 'source_tmpl': 'Test-NoWarnings-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AD/ADAMK'], - 'checksums': ['638a57658cb119af1fe5b15e73d47c2544dcfef84af0c6b1b2e97f08202b686c'], - }), - ('Crypt::DES', '2.07', { - 'source_tmpl': 'Crypt-DES-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DP/DPARIS'], - 'checksums': ['2db1ebb5837b4cb20051c0ee5b733b4453e3137df0a92306034c867621edd7e7'], - }), - ('Exporter', '5.74', { - 'source_tmpl': 'Exporter-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['eadb889ef673ad940da6aa4f6f7d75fc1e625ae786ae3533fd313eaf629945b8'], - }), - ('Class::Inspector', '1.36', { - 'source_tmpl': 'Class-Inspector-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['cc295d23a472687c24489d58226ead23b9fdc2588e522f0b5f0747741700694e'], - }), - ('Parse::RecDescent', '1.967015', { - 'source_tmpl': 'Parse-RecDescent-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JT/JTBRAUN'], - 'checksums': ['1943336a4cb54f1788a733f0827c0c55db4310d5eae15e542639c9dd85656e37'], - }), - ('Carp', '1.50', { - 'source_tmpl': 'Carp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/X/XS/XSAWYERX'], - 'checksums': ['f5273b4e1a6d51b22996c48cb3a3cbc72fd456c4038f5c20b127e2d4bcbcebd9'], - }), - ('XML::Parser', '2.46', { - 'source_tmpl': 'XML-Parser-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['d331332491c51cccfb4cb94ffc44f9cd73378e618498d4a37df9e043661c515d'], - }), - ('XML::XPath', '1.44', { - 'source_tmpl': 'XML-XPath-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MANWAR'], - 'checksums': ['1cc9110705165dc09dd09974dd7c0b6709c9351d6b6b1cef5a711055f891dd0f'], - }), - ('JSON', '4.02', { - 'source_tmpl': 'JSON-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI'], - 'checksums': ['444a88755a89ffa2a5424ab4ed1d11dca61808ebef57e81243424619a9e8627c'], - }), - ('Sub::Exporter', '0.987', { - 'source_tmpl': 'Sub-Exporter-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['543cb2e803ab913d44272c7da6a70bb62c19e467f3b12aaac4c9523259b083d6'], - }), - ('Class::Load::XS', '0.10', { - 'source_tmpl': 'Class-Load-XS-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['5bc22cf536ebfd2564c5bdaf42f0d8a4cee3d1930fc8b44b7d4a42038622add1'], - }), - ('Set::IntSpan::Fast', '1.15', { - 'source_tmpl': 'Set-IntSpan-Fast-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AN/ANDYA'], - 'checksums': ['cfb1768c24f55208e87405b17f537f0f303fa141891d0b22d509a941aa57e24e'], - }), - ('Sub::Exporter::Progressive', '0.001013', { - 'source_tmpl': 'Sub-Exporter-Progressive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/F/FR/FREW'], - 'checksums': 
['d535b7954d64da1ac1305b1fadf98202769e3599376854b2ced90c382beac056'], - }), - ('Data::Dumper::Concise', '2.023', { - 'source_tmpl': 'Data-Dumper-Concise-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['a6c22f113caf31137590def1b7028a7e718eface3228272d0672c25e035d5853'], - }), - ('File::Slurp::Tiny', '0.004', { - 'source_tmpl': 'File-Slurp-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['452995beeabf0e923e65fdc627a725dbb12c9e10c00d8018c16d10ba62757f1e'], - }), - ('Algorithm::Diff', '1.1903', { - 'source_tmpl': 'Algorithm-Diff-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TY/TYEMQ'], - 'checksums': ['30e84ac4b31d40b66293f7b1221331c5a50561a39d580d85004d9c1fff991751'], - }), - ('AnyData', '0.12', { - 'source_tmpl': 'AnyData-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['be6a957f04a2feba9b305536b132deceba1f455db295b221a63e75567fadbcfc'], - }), - ('Text::Iconv', '1.7', { - 'source_tmpl': 'Text-Iconv-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MP/MPIOTR'], - 'checksums': ['5b80b7d5e709d34393bcba88971864a17b44a5bf0f9e4bcee383d029e7d2d5c3'], - }), - ('Class::Data::Inheritable', '0.08', { - 'source_tmpl': 'Class-Data-Inheritable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TM/TMTM'], - 'checksums': ['9967feceea15227e442ec818723163eb6d73b8947e31f16ab806f6e2391af14a'], - }), - ('Text::Balanced', '2.03', { - 'source_tmpl': 'Text-Balanced-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHAY'], - 'checksums': ['057753f8f0568b53921f66a60a89c30092b73329bcc61a2c43339ab70c9792c8'], - }), - ('strictures', '2.000006', { - 'source_tmpl': 'strictures-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['09d57974a6d1b2380c802870fed471108f51170da81458e2751859f2714f8d57'], - }), - ('Switch', '2.17', { - 'source_tmpl': 'Switch-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CH/CHORNY'], - 'checksums': ['31354975140fe6235ac130a109496491ad33dd42f9c62189e23f49f75f936d75'], - }), - ('File::Which', '1.23', { - 'source_tmpl': 'File-Which-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['b79dc2244b2d97b6f27167fc3b7799ef61a179040f3abd76ce1e0a3b0bc4e078'], - }), - ('Email::Date::Format', '1.005', { - 'source_tmpl': 'Email-Date-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['579c617e303b9d874411c7b61b46b59d36f815718625074ae6832e7bb9db5104'], - }), - ('Error', '0.17029', { - 'source_tmpl': 'Error-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['1a23f7913032aed6d4b68321373a3899ca66590f4727391a091ec19c95bf7adc'], - }), - ('Mock::Quick', '1.111', { - 'source_tmpl': 'Mock-Quick-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/EX/EXODIST'], - 'checksums': ['ff786008bf8c022064ececd3b7ed89c76b35e8d1eac6cf472a9f51771c1c9f2c'], - }), - ('Text::CSV', '2.00', { - 'source_tmpl': 'Text-CSV-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI'], - 'checksums': ['8ccbd9195805222d995844114d0e595bb24ce188f85284dbf256080311cbb2c2'], - }), - ('Test::Output', '1.031', { - 'source_tmpl': 
'Test-Output-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BD/BDFOY'], - 'checksums': ['f8b8f37185717872727d06f6c078fa77db794410faf2f6da4d37b0b7650f7ea4'], - }), - ('Class::DBI', 'v3.0.17', { - 'source_tmpl': 'Class-DBI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TM/TMTM'], - 'checksums': ['541354fe361c56850cb11261f6ca089a14573fa764792447444ff736ae626206'], - }), - ('List::SomeUtils', '0.56', { - 'source_tmpl': 'List-SomeUtils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['eaa7d99ce86380c0389876474c8eb84acc0a6bfeef1b0fc23a292592de6f89f7'], - }), - ('List::UtilsBy', '0.11', { - 'source_tmpl': 'List-UtilsBy-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PEVANS'], - 'checksums': ['faddf43b4bc21db8e4c0e89a26e5f23fe626cde3491ec651b6aa338627f5775a'], - }), - ('List::AllUtils', '0.16', { - 'source_tmpl': 'List-AllUtils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['559b3aa911c73003a3a1ebd860d3b16e171137de8203d86be63a2390364c63dd'], - }), - ('UNIVERSAL::moniker', '0.08', { - 'source_tmpl': 'UNIVERSAL-moniker-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/K/KA/KASEI'], - 'checksums': ['94ce27a546cd57cb52e080a8f2533a7cc2350028388582485bd1039a37871f9c'], - }), - ('Exception::Class', '1.44', { - 'source_tmpl': 'Exception-Class-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['33f3fbf8b138d3b04ea4ec0ba83fb0df6ba898806bcf4ef393d4cafc1a23ee0d'], - }), - ('File::CheckTree', '4.42', { - 'source_tmpl': 'File-CheckTree-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['66fb417f8ff8a5e5b7ea25606156e70e204861c59fa8c3831925b4dd3f155f8a'], - }), - ('Math::VecStat', '0.08', { - 'source_tmpl': 'Math-VecStat-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/A/AS/ASPINELLI'], - 'checksums': ['409a8e0e4b1025c8e80f628f65a9778aa77ab285161406ca4a6c097b13656d0d'], - }), - ('Pod::LaTeX', '0.61', { - 'source_tmpl': 'Pod-LaTeX-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TJ/TJENNESS'], - 'checksums': ['15a840ea1c8a76cd3c865fbbf2fec33b03615c0daa50f9c800c54e0cf0659d46'], - }), - ('Eval::Closure', '0.14', { - 'source_tmpl': 'Eval-Closure-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DO/DOY'], - 'checksums': ['ea0944f2f5ec98d895bef6d503e6e4a376fea6383a6bc64c7670d46ff2218cad'], - }), - ('HTTP::Request', '6.18', { - 'source_tmpl': 'HTTP-Message-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['d060d170d388b694c58c14f4d13ed908a2807f0e581146cef45726641d809112'], - }), - ('XML::Twig', '3.52', { - 'source_tmpl': 'XML-Twig-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIROD'], - 'checksums': ['fef75826c24f2b877d0a0d2645212fc4fb9756ed4d2711614ac15c497e8680ad'], - }), - ('IO::String', '1.08', { - 'source_tmpl': 'IO-String-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['2a3f4ad8442d9070780e58ef43722d19d1ee21a803bf7c8206877a10482de5a0'], - }), - ('XML::Simple', '2.25', { - 'source_tmpl': 'XML-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GR/GRANTM'], - 'checksums': 
['531fddaebea2416743eb5c4fdfab028f502123d9a220405a4100e68fc480dbf8'], - }), - ('Sub::Install', '0.928', { - 'source_tmpl': 'Sub-Install-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['61e567a7679588887b7b86d427bc476ea6d77fffe7e0d17d640f89007d98ef0f'], - }), - ('HTTP::Cookies', '6.08', { - 'source_tmpl': 'HTTP-Cookies-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/O/OA/OALDERS'], - 'checksums': ['49ebb73576eb41063c04bc079477df094496deec805ae033f3be338c23c3af59'], - }), - ('Pod::Plainer', '1.04', { - 'source_tmpl': 'Pod-Plainer-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RM/RMBARKER'], - 'checksums': ['1bbfbf7d1d4871e5a83bab2137e22d089078206815190eb1d5c1260a3499456f'], - }), - ('Test::Exception::LessClever', '0.009', { - 'source_tmpl': 'Test-Exception-LessClever-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['3b2731a44956a11f74b46b3ecf0734fab651e1c0bcf120f8b407aa1b4d43ac34'], - }), - ('LWP::MediaTypes', '6.02', { - 'source_tmpl': 'LWP-MediaTypes-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['18790b0cc5f0a51468495c3847b16738f785a2d460403595001e0b932e5db676'], - }), - ('Scalar::List::Utils', '1.55', { - 'modulename': 'List::Util', - 'source_tmpl': 'Scalar-List-Utils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PEVANS'], - 'checksums': ['4d2bdc1c72a7bc4d69d6a5cc85bc7566497c3b183c6175b832784329d58feb4b'], - }), - ('Data::Section::Simple', '0.07', { - 'source_tmpl': 'Data-Section-Simple-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['0b3035ffdb909aa1f7ded6b608fa9d894421c82c097d51e7171170d67579a9cb'], - }), - ('Class::Trigger', '0.15', { - 'source_tmpl': 'Class-Trigger-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['b7a878d44dea67d64df2ca18020d9d868a95596debd16f1a264874209332b07f'], - }), - ('HTTP::Daemon', '6.01', { - 'source_tmpl': 'HTTP-Daemon-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['43fd867742701a3f9fcc7bd59838ab72c6490c0ebaf66901068ec6997514adc2'], - }), - ('File::HomeDir', '1.004', { - 'source_tmpl': 'File-HomeDir-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['45f67e2bb5e60a7970d080e8f02079732e5a8dfc0c7c3cbdb29abfb3f9f791ad'], - }), - ('HTTP::Date', '6.02', { - 'source_tmpl': 'HTTP-Date-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['e8b9941da0f9f0c9c01068401a5e81341f0e3707d1c754f8e11f42a7e629e333'], - }), - ('Authen::SASL', '2.16', { - 'source_tmpl': 'Authen-SASL-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GB/GBARR'], - 'checksums': ['6614fa7518f094f853741b63c73f3627168c5d3aca89b1d02b1016dc32854e09'], - }), - ('Clone', '0.42', { - 'source_tmpl': 'Clone-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GARU'], - 'checksums': ['54a930db8f178321cf201da040442d198e8c18a77e7fcabb578e460b6acb07e5'], - }), - ('Data::Types', '0.17', { - 'source_tmpl': 'Data-Types-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MANWAR'], - 'checksums': ['860751feb79b7dfc1af71c4b7fe920220ec6d31c4ab9402b8f178f7f4b8293c1'], - }), - 
('Import::Into', '1.002005', { - 'source_tmpl': 'Import-Into-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['bd9e77a3fb662b40b43b18d3280cd352edf9fad8d94283e518181cc1ce9f0567'], - }), - ('DateTime::Tiny', '1.07', { - 'source_tmpl': 'DateTime-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['83568a22838cb518fbeb9e060460ec7f59d5a0b0a1cc06562954c3674d7cf7e4'], - }), - ('DBD::AnyData', '0.110', { - 'source_tmpl': 'DBD-AnyData-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['247f0d88e55076fd3f6c7bf3fd527989d62fcc1ef9bde9bf2ee11c280adcaeab'], - }), - ('Text::Format', '0.61', { - 'source_tmpl': 'Text-Format-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF'], - 'checksums': ['bb8a3b8ff515c85101baf553a769337f944a05cde81f111ae78aff416bf4ae2b'], - }), - ('Devel::CheckCompiler', '0.07', { - 'source_tmpl': 'Devel-CheckCompiler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SY/SYOHEX'], - 'checksums': ['768b7697b4b8d4d372c7507b65e9dd26aa4223f7100183bbb4d3af46d43869b5'], - }), - ('Log::Handler', '0.90', { - 'source_tmpl': 'Log-Handler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BL/BLOONIX'], - 'checksums': ['3a5c80e7128454770f83acab8cbd3e70e5ec3d59a61dc32792a178f0b31bf74d'], - }), - ('DBIx::ContextualFetch', '1.03', { - 'source_tmpl': 'DBIx-ContextualFetch-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TM/TMTM'], - 'checksums': ['85e2f805bfc81cd738c294316b27a515397036f397a0ff1c6c8d754c38530306'], - }), - ('Devel::StackTrace', '2.04', { - 'source_tmpl': 'Devel-StackTrace-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['cd3c03ed547d3d42c61fa5814c98296139392e7971c092e09a431f2c9f5d6855'], - }), - ('Term::ReadKey', '2.38', { - 'source_tmpl': 'TermReadKey-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JS/JSTOWE'], - 'checksums': ['5a645878dc570ac33661581fbb090ff24ebce17d43ea53fd22e105a856a47290'], - }), - ('Set::IntSpan', '1.19', { - 'source_tmpl': 'Set-IntSpan-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SW/SWMCD'], - 'checksums': ['11b7549b13ec5d87cc695dd4c777cd02983dd5fe9866012877fb530f48b3dfd0'], - }), - ('Moose', '2.2013', { - 'source_tmpl': 'Moose-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['df74dc78088921178edf72d827017d6c92737c986659f2dadc533ae24675e77c'], - }), - ('Algorithm::Dependency', '1.112', { - 'source_tmpl': 'Algorithm-Dependency-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['7e0fb7c39f56a2dccf9d0295c82f3031ee116e807f6a12a438fa4dd41b0ec187'], - }), - ('Font::TTF', '1.06', { - 'source_tmpl': 'Font-TTF-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BH/BHALLISSY'], - 'checksums': ['4b697d444259759ea02d2c442c9bffe5ffe14c9214084a01f743693a944cc293'], - }), - ('IPC::Run3', '0.048', { - 'source_tmpl': 'IPC-Run3-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['3d81c3cc1b5cff69cca9361e2c6e38df0352251ae7b41e2ff3febc850e463565'], - }), - ('File::Find::Rule', '0.34', { - 'source_tmpl': 'File-Find-Rule-%(version)s.tar.gz', - 'source_urls': 
['https://cpan.metacpan.org/authors/id/R/RC/RCLAMP'], - 'checksums': ['7e6f16cc33eb1f29ff25bee51d513f4b8a84947bbfa18edb2d3cc40a2d64cafe'], - }), - ('SQL::Statement', '1.412', { - 'source_tmpl': 'SQL-Statement-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RE/REHSACK'], - 'checksums': ['65c870883379c11b53f19ead10aaac241ccc86a90bbab77f6376fe750720e5c8'], - }), - ('File::Slurp', '9999.32', { - 'source_tmpl': 'File-Slurp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CA/CAPOEIRAB'], - 'checksums': ['4c3c21992a9d42be3a79dd74a3c83d27d38057269d65509a2f555ea0fb2bc5b0'], - }), - ('Package::Stash', '0.38', { - 'source_tmpl': 'Package-Stash-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['c58ee8844df2dda38e3bf66fdf443439aaefaef1a33940edf2055f0afd223a7f'], - }), - ('Data::OptList', '0.110', { - 'source_tmpl': 'Data-OptList-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['366117cb2966473f2559f2f4575ff6ae69e84c69a0f30a0773e1b51a457ef5c3'], - }), - ('Package::Constants', '0.06', { - 'source_tmpl': 'Package-Constants-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['0b58be78706ccc4e4bd9bbad41767470427fd7b2cfad749489de101f85bc5df5'], - }), - ('CPANPLUS', '0.9908', { - 'source_tmpl': 'CPANPLUS-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BI/BINGOS'], - 'checksums': ['58f6acb47d791ed8e30a6ebcc25089218bacad96e46da8e621a92b778c569dd4'], - }), - ('IO::Tty', '1.14', { - 'source_tmpl': 'IO-Tty-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TO/TODDR'], - 'checksums': ['51f3e4e311128bdb2c6a15f02c51376cb852ccf9df9bebe8dfbb5f9561eb95b5'], - }), - ('Text::Soundex', '3.05', { - 'source_tmpl': 'Text-Soundex-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['f6dd55b4280b25dea978221839864382560074e1d6933395faee2510c2db60ed'], - }), - ('Lingua::EN::PluralToSingular', '0.21', { - 'source_tmpl': 'Lingua-EN-PluralToSingular-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/B/BK/BKB'], - 'checksums': ['f8a8b7de28c25c96190d7f48c90b5ad9b9bf517f3835c77641f0e8fa546c0d1d'], - }), - ('Want', '0.29', { - 'source_tmpl': 'Want-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RO/ROBIN'], - 'checksums': ['b4e4740b8d4cb783591273c636bd68304892e28d89e88abf9273b1de17f552f7'], - }), - ('Bundle::BioPerl', '2.1.9', { - 'source_tmpl': 'Bundle-BioPerl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CJ/CJFIELDS'], - 'checksums': ['c343ba97f49d86e7fb14aef4cfe3124992e2a5c3168e53a54606dd611d73e5c7'], - }), - ('Mail::Util', '2.21', { - 'source_tmpl': 'MailTools-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MARKOV'], - 'checksums': ['4ad9bd6826b6f03a2727332466b1b7d29890c8d99a32b4b3b0a8d926ee1a44cb'], - }), - ('Text::Template', '1.59', { - 'source_tmpl': 'Text-Template-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MS/MSCHOUT'], - 'checksums': ['1dd2c788c05303ed9a970e1881109642151fa93e02c7a80d4c70608276bab1ee'], - }), - ('PDF::API2', '2.037', { - 'source_tmpl': 'PDF-API2-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/S/SS/SSIMMS'], - 'checksums': ['142803d1886d2a2919d374fb6c25681630aa26740e3f8023337f996fa6c6297e'], 
- }), - ('Devel::CheckLib', '1.14', { - 'source_tmpl': 'Devel-CheckLib-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MATTN'], - 'checksums': ['f21c5e299ad3ce0fdc0cb0f41378dca85a70e8d6c9a7599f0e56a957200ec294'], - }), - ('SVG', '2.85', { - 'source_tmpl': 'SVG-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MANWAR'], - 'checksums': ['159ec81f3236175957c9a4e911cb0e3715dc5b658144c8a5418b772768a1477c'], - }), - ('Statistics::Basic', '1.6611', { - 'source_tmpl': 'Statistics-Basic-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JE/JETTERO'], - 'checksums': ['6855ce5615fd3e1af4cfc451a9bf44ff29a3140b4e7130034f1f0af2511a94fb'], - }), - ('Log::Log4perl', '1.49', { - 'source_tmpl': 'Log-Log4perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MS/MSCHILLI'], - 'checksums': ['b739187f519146cb6bebcfc427c64b1f4138b35c5f4c96f46a21ed4a43872e16'], - }), - ('Math::CDF', '0.1', { - 'source_tmpl': 'Math-CDF-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/C/CA/CALLAHAN'], - 'checksums': ['7896bf250835ce47dcc813cb8cf9dc576c5455de42e822dcd7d8d3fef2125565'], - }), - ('Array::Utils', '0.5', { - 'source_tmpl': 'Array-Utils-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/Z/ZM/ZMIJ/Array'], - 'checksums': ['89dd1b7fcd9b4379492a3a77496e39fe6cd379b773fd03a6b160dd26ede63770'], - }), - ('File::Grep', '0.02', { - 'source_tmpl': 'File-Grep-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MN/MNEYLON'], - 'checksums': ['462e15274eb6278521407ea302d9eea7252cd44cab2382871f7de833d5f85632'], - }), - ('File::Path', '2.17', { - 'source_tmpl': 'File-Path-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JK/JKEENAN'], - 'checksums': ['8c506dfd69a70fdd5f1212fe58fbc53620a89a8293e2ac6860570f868269fb31'], - }), - ('File::Slurper', '0.012', { - 'source_tmpl': 'File-Slurper-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['4efb2ea416b110a1bda6f8133549cc6ea3676402e3caf7529fce0313250aa578'], - }), - ('File::Temp', '0.2309', { - 'source_tmpl': 'File-Temp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['01b242a9888d155db981aa0a9891ce2c9e439f0e4bbff4dbf17ca4997be6235f'], - }), - ('Graph', '0.9704', { - 'source_tmpl': 'Graph-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/J/JH/JHI'], - 'checksums': ['325e8eb07be2d09a909e450c13d3a42dcb2a2e96cc3ac780fe4572a0d80b2a25'], - }), - ('Graph::ReadWrite', '2.09', { - 'source_tmpl': 'Graph-ReadWrite-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/N/NE/NEILB'], - 'checksums': ['b01ef06ce922eea12d5ce614d63ddc5f3ee7ad0d05f9577051d3f87a89799a4a'], - }), - ('PerlIO::utf8_strict', '0.007', { - 'source_tmpl': 'PerlIO-utf8_strict-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['83a33f2fe046cb3ad6afc80790635a423e2c7c6854afacc6998cd46951cc81cb'], - }), - ('Devel::OverloadInfo', '0.005', { - 'source_tmpl': 'Devel-OverloadInfo-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/I/IL/ILMARI'], - 'checksums': ['8bfde2ffa47c9946f8adc8cfc445c2f97b8d1cdd678111bee9f444e82f7aa6e7'], - }), - ('Sub::Identify', '0.14', { - 'source_tmpl': 'Sub-Identify-%(version)s.tar.gz', - 'source_urls': 
['https://cpan.metacpan.org/authors/id/R/RG/RGARCIA'], - 'checksums': ['068d272086514dd1e842b6a40b1bedbafee63900e5b08890ef6700039defad6f'], - }), - ('Digest::MD5::File', '0.08', { - 'source_tmpl': 'Digest-MD5-File-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DM/DMUEY'], - 'checksums': ['adb43a54e32627b4f7e57c9640e6eb06d0bb79d8ea54cd0bd79ed35688fb1218'], - }), - ('String::RewritePrefix', '0.008', { - 'source_tmpl': 'String-RewritePrefix-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['e45a31d6914e8f5fc722ef48d8819400dafc02105e0c61414aabbf01bce208eb'], - }), - ('Getopt::Long::Descriptive', '0.105', { - 'source_tmpl': 'Getopt-Long-Descriptive-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['a71cdbcf4043588b26a42a13d151c243f6eccf38e8fc0b18ffb5b53651ab8c15'], - }), - ('App::Cmd', '0.331', { - 'source_tmpl': 'App-Cmd-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['4a5d3df0006bd278880d01f4957aaa652a8f91fe8f66e93adf70fba0c3ecb680'], - }), - ('Path::Tiny', '0.108', { - 'source_tmpl': 'Path-Tiny-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['3c49482be2b3eb7ddd7e73a5b90cff648393f5d5de334ff126ce7a3632723ff5'], - }), - ('Carp::Clan', '6.08', { - 'source_tmpl': 'Carp-Clan-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['c75f92e34422cc5a65ab05d155842b701452434e9aefb649d6e2289c47ef6708'], - }), - ('Sub::Exporter::ForMethods', '0.100052', { - 'source_tmpl': 'Sub-Exporter-ForMethods-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['421fbba4f6ffcf13c4335f2c20630d709e6fa659c07545d094dbc5a558ad3006'], - }), - ('MooseX::Types', '0.50', { - 'source_tmpl': 'MooseX-Types-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['9cd87b3492cbf0be9d2df9317b2adf9fc30663770e69906654bea3f41b17cb08'], - }), - ('B::Hooks::EndOfScope', '0.24', { - 'source_tmpl': 'B-Hooks-EndOfScope-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['03aa3dfe5d0aa6471a96f43fe8318179d19794d4a640708f0288f9216ec7acc6'], - }), - ('namespace::clean', '0.27', { - 'source_tmpl': 'namespace-clean-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RI/RIBASUSHI'], - 'checksums': ['8a10a83c3e183dc78f9e7b7aa4d09b47c11fb4e7d3a33b9a12912fd22e31af9d'], - }), - ('namespace::autoclean', '0.29', { - 'source_tmpl': 'namespace-autoclean-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['45ebd8e64a54a86f88d8e01ae55212967c8aa8fed57e814085def7608ac65804'], - }), - ('File::pushd', '1.016', { - 'source_tmpl': 'File-pushd-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['d73a7f09442983b098260df3df7a832a5f660773a313ca273fa8b56665f97cdc'], - }), - ('MooseX::Types::Perl', '0.101343', { - 'source_tmpl': 'MooseX-Types-Perl-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['f084beaf3c33209c68d05d4dbc24c25d604a6458b9738d96dceb086c8ef1325a'], - }), - ('Role::Tiny', '2.001004', { - 'source_tmpl': 'Role-Tiny-%(version)s.tar.gz', - 'source_urls': 
['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['92ba5712850a74102c93c942eb6e7f62f7a4f8f483734ed289d08b324c281687'], - }), - ('Specio', '0.46', { - 'source_tmpl': 'Specio-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['0bf42aa116076d6efc18f72b72c7acb5638bd41c0aa09aecc12fc8bf9ceb9596'], - }), - ('Params::ValidationCompiler', '0.30', { - 'source_tmpl': 'Params-ValidationCompiler-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['dc5bee23383be42765073db284bed9fbd819d4705ad649c20b644452090d16cb'], - }), - ('Log::Dispatch', '2.70', { - 'source_tmpl': 'Log-Dispatch-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DR/DROLSKY'], - 'checksums': ['a3d91cc52467d3a3c6683103f3df4472d71e405a45f553289448713ac4293f21'], - }), - ('String::Flogger', '1.101245', { - 'source_tmpl': 'String-Flogger-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['aa03c08e01f802a358c175c6093c02adf9688659a087a8ddefdc3e9cef72640b'], - }), - ('Log::Dispatchouli', '2.019', { - 'source_tmpl': 'Log-Dispatchouli-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['fe8890c553c8f72b8b55633067270862c34db2ab8ff1cf8db0855f0427c23442'], - }), - ('Data::Section', '0.200007', { - 'source_tmpl': 'Data-Section-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['cd937e5b70e34aab885ff414e2a6d19e4783b7c28fc3cda5145b230514ebb4de'], - }), - ('Software::License', '0.103014', { - 'source_tmpl': 'Software-License-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/L/LE/LEONT'], - 'checksums': ['eb45ea602d75006683789fbba57a01c0a1f7037371de95ea54b91577535d1789'], - }), - ('MooseX::SetOnce', '0.200002', { - 'source_tmpl': 'MooseX-SetOnce-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['cbed06b7fcd353f0d99bf80a87c1c0b581160697231b3ad9a608da231ba2b659'], - }), - ('Term::Encoding', '0.03', { - 'source_tmpl': 'Term-Encoding-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA'], - 'checksums': ['95ba9687d735d25a3cbe64508d7894f009c7fa2a1726c3e786e9e21da2251d0b'], - }), - ('Config::MVP', '2.200011', { - 'source_tmpl': 'Config-MVP-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['23c95666fc43c4adaebcc093b1b56091efc2a6aa2d75366a216d18eda96ad716'], - }), - ('Throwable', '0.200013', { - 'source_tmpl': 'Throwable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['9987d0deb5bddd352a6330cefbe932f882e36dd8c8a4564bcfd372dc396b8fa0'], - }), - ('Sub::Quote', '2.006006', { - 'source_tmpl': 'Sub-Quote-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/H/HA/HAARG'], - 'checksums': ['6e4e2af42388fa6d2609e0e82417de7cc6be47223f576592c656c73c7524d89d'], - }), - ('Role::Identifiable::HasIdent', '0.007', { - 'source_tmpl': 'Role-Identifiable-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['561346d1a1a07a45bd851d859a825a7f67925a7a3ba5ba58e0cdad8bb99073ad'], - }), - ('Role::HasMessage', '0.006', { - 'source_tmpl': 'Role-HasMessage-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': 
['f6a6dbe0476ff95ee1ffbef825eb18d9b02b0618deba4686e7c63b99d576d4d3'], - }), - ('MooseX::OneArgNew', '0.005', { - 'source_tmpl': 'MooseX-OneArgNew-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['7e4fcf474ea6c4244f0885f1066729cfdc472fbd7190dd41b4b55bcd67c3103f'], - }), - ('MooseX::Role::Parameterized', '1.11', { - 'source_tmpl': 'MooseX-Role-Parameterized-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['1cfe766c5d7f0ecab57f733dcca430a2a2acd6b995757141b940ade3692bec9e'], - }), - ('MooseX::LazyRequire', '0.11', { - 'source_tmpl': 'MooseX-LazyRequire-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/E/ET/ETHER'], - 'checksums': ['ef620c1e019daf9cf3f23a943d25a94c91e93ab312bcd63be2e9740ec0b94288'], - }), - ('Mixin::Linewise::Readers', '0.108', { - 'source_tmpl': 'Mixin-Linewise-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['7df20678474c0973930a472b0c55e3f8e85b7790b68ab18ef618f9c453c8aef2'], - }), - ('Config::INI', '0.025', { - 'source_tmpl': 'Config-INI-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['628bf76d5b91f89dde22d4813ec033026ebf71b772bb61ccda909da00c869732'], - }), - ('String::Truncate', '1.100602', { - 'source_tmpl': 'String-Truncate-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['aaa3d4eec01136921484139133eb75d5c571fe51b0ad329f089e6d469a235f6e'], - }), - ('Pod::Eventual', '0.094001', { - 'source_tmpl': 'Pod-Eventual-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['be9fb8910b108e5d1a66f002b659ad22576e88d779b703dff9d15122c3f80834'], - }), - ('Pod::Elemental', '0.103005', { - 'source_tmpl': 'Pod-Elemental-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['824336ec18326e3b970e7815922b3921b0a821d2ee0e50b0c5b2bc327f99615e'], - }), - ('Pod::Weaver', '4.015', { - 'source_tmpl': 'Pod-Weaver-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['5af25b29a55783e495a9df5ef6293240e2c9ab02764613d79f1ed50b12dec5ae'], - }), - ('Dist::Zilla', '6.015', { - 'source_tmpl': 'Dist-Zilla-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/R/RJ/RJBS'], - 'checksums': ['2288e5819f942c2f7051fae265b8e3084c3ce155d4f409751396784366ab891b'], - }), - ('XML::RegExp', '0.04', { - 'source_tmpl': 'XML-RegExp-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TJ/TJMATHER'], - 'checksums': ['df1990096036085c8e2d45904fe180f82bfed40f1a7e05243f334ea10090fc54'], - }), - ('XML::DOM', '1.46', { - 'source_tmpl': 'XML-DOM-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/T/TJ/TJMATHER'], - 'checksums': ['8ba24b0b459b01d6c5e5b0408829c7d5dfe47ff79b3548c813759048099b175e'], - }), - ('Data::Dump', '1.23', { - 'source_tmpl': 'Data-Dump-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/G/GA/GAAS'], - 'checksums': ['af53b05ef1387b4cab4427e6789179283e4f0da8cf036e8db516ddb344512b65'], - }), - ('File::Next', '1.18', { - 'source_tmpl': 'File-Next-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PE/PETDANCE'], - 'checksums': ['f900cb39505eb6e168a9ca51a10b73f1bbde1914b923a09ecd72d9c02e6ec2ef'], - }), - ('App::cpanminus', 
'1.7044', { - 'source_tmpl': 'App-cpanminus-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/'], - 'checksums': ['9b60767fe40752ef7a9d3f13f19060a63389a5c23acc3e9827e19b75500f81f3'], - }), - # Below this point are packages added for JSC, not included in the upstream easybuild repositories - ('Alien::Base', '2.26', { - 'source_tmpl': 'Alien-Build-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['b104566f2c82e12913a276f1485270bb5f5b6bcb6ff9dbe3d098e99d8d74cf71'], - }), - ('File::chdir', '0.1011', { - 'source_tmpl': 'File-chdir-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN'], - 'checksums': ['31ebf912df48d5d681def74b9880d78b1f3aca4351a0ed1fe3570b8e03af6c79'], - }), - ('Alien::Libxml2', '0.16', { - 'source_tmpl': 'Alien-Libxml2-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/P/PL/PLICEASE'], - 'checksums': ['56f06a58054f788dcef8d3b6669fb47d172e9ca0b7a12d0241d9cf7835a53b97'], - }), - ('XML::LibXML', '2.0205', { - 'source_tmpl': 'XML-LibXML-%(version)s.tar.gz', - 'source_urls': ['http://search.cpan.org/CPAN/authors/id/S/SH/SHLOMIF'], - 'checksums': ['3a25002714b13f192d0baef5dc25ad2fbf09f8ec4ad1f793dec8fe6e2f5b2278'], - }), - ('Date::Calc', '6.4', { - 'source_tmpl': 'Date-Calc-%(version)s.tar.gz', - 'source_urls': ['http://search.cpan.org/CPAN/authors/id/S/ST/STBEY'], - 'checksums': ['7ce137b2e797b7c0901f3adf1a05a19343356cd1f04676aa1c56a9f624f859ad'], - }), - ('Pod::Parser', '1.63', { - 'source_tmpl': 'Pod-Parser-%(version)s.tar.gz', - 'source_urls': ['https://cpan.metacpan.org/authors/id/M/MA/MAREKR/'], - 'checksums': ['dbe0b56129975b2f83a02841e8e0ed47be80f060686c66ea37e529d97aa70ccd'], - }), -] - -moduleclass = 'lang' diff --git a/Golden_Repo/p/Pillow-SIMD/Pillow-SIMD-7.0.0.post3-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/Pillow-SIMD/Pillow-SIMD-7.0.0.post3-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 0cbade2f51f38d5d679de9bf05d2432c107de56f..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Pillow-SIMD/Pillow-SIMD-7.0.0.post3-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'Pillow-SIMD' -version = '7.0.0.post3' -versionsuffix = '-Python-%(pyver)s' - - -homepage = 'https://github.com/uploadcare/pillow-simd' -description = """Pillow-SIMD is a drop-in replacement for Pillow with AVX support -Pillow is the 'friendly PIL fork' by Alex Clark and Contributors. 
- PIL is the Python Imaging Library by Fredrik Lundh and Contributors.""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/uploadcare/pillow-simd/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [('binutils', '2.36.1')] - -dependencies = [ - ('Python', '3.8.5'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), - ('LibTIFF', '4.1.0'), - ('freetype', '2.10.1') -] - -use_pip = True -download_dep_fail = True - -options = {'modulename': 'PIL'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/PIL'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/Pillow-SIMD/Pillow-SIMD-7.0.0.post3-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/Pillow-SIMD/Pillow-SIMD-7.0.0.post3-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index c2110159fe17783ec386b8ca4e083c49df3e6ee2..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Pillow-SIMD/Pillow-SIMD-7.0.0.post3-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'Pillow-SIMD' -version = '7.0.0.post3' -versionsuffix = '-Python-%(pyver)s' - - -homepage = 'https://github.com/uploadcare/pillow-simd' -description = """Pillow-SIMD is a drop-in replacement for Pillow with AVX support -Pillow is the 'friendly PIL fork' by Alex Clark and Contributors. - PIL is the Python Imaging Library by Fredrik Lundh and Contributors.""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/uploadcare/pillow-simd/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [('binutils', '2.34')] - -dependencies = [ - ('Python', '3.8.5'), - ('libjpeg-turbo', '2.0.5'), - ('libpng', '1.6.37'), - ('zlib', '1.2.11'), - ('LibTIFF', '4.1.0'), - ('freetype', '2.10.1') -] - -use_pip = True -download_dep_fail = True - -options = {'modulename': 'PIL'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/PIL'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PostgreSQL/PostgreSQL-12.3-GCCcore-10.3.0.eb b/Golden_Repo/p/PostgreSQL/PostgreSQL-12.3-GCCcore-10.3.0.eb deleted file mode 100644 index 4d57d78c5d9ab6562791cb7be4b9a914fa381d65..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PostgreSQL/PostgreSQL-12.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PostgreSQL' -version = '12.3' - -homepage = 'http://www.mysql.com/' -description = """PostgreSQL is a powerful, open source object-relational - database system. It is fully ACID compliant, has full support for foreign - keys, joins, views, triggers, and stored procedures (in multiple languages). - It includes most SQL:2008 data types, including INTEGER, - NUMERIC, BOOLEAN, CHAR, VARCHAR, DATE, INTERVAL, and TIMESTAMP. - It also supports storage of binary large objects, including pictures, - sounds, or video. It has native programming interfaces for C/C++, Java, - .Net, Perl, Python, Ruby, Tcl, ODBC, among others, and exceptional - documentation. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['http://ftp.postgresql.org/pub/source/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -osdependencies = [('openssl-devel', 'libssl-dev')] - -dependencies = [ - ('Java', '15', '', SYSTEM), - ('libreadline', '8.0'), - ('zlib', '1.2.11'), -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Bison', '3.7.6'), - ('flex', '2.6.4'), - ('Perl', '5.32.0'), -] - -configopts = 'LDFLAGS="$LDFLAGS -lpthread"' - -sanity_check_paths = { - 'files': ['bin/psql'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/PostgreSQL/PostgreSQL-12.3-GCCcore-9.3.0.eb b/Golden_Repo/p/PostgreSQL/PostgreSQL-12.3-GCCcore-9.3.0.eb deleted file mode 100644 index a869d62bf93773a4b99e88d4f7efd384b7c0bad5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PostgreSQL/PostgreSQL-12.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PostgreSQL' -version = '12.3' - -homepage = 'http://www.mysql.com/' -description = """PostgreSQL is a powerful, open source object-relational - database system. It is fully ACID compliant, has full support for foreign - keys, joins, views, triggers, and stored procedures (in multiple languages). - It includes most SQL:2008 data types, including INTEGER, - NUMERIC, BOOLEAN, CHAR, VARCHAR, DATE, INTERVAL, and TIMESTAMP. - It also supports storage of binary large objects, including pictures, - sounds, or video. It has native programming interfaces for C/C++, Java, - .Net, Perl, Python, Ruby, Tcl, ODBC, among others, and exceptional - documentation. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['http://ftp.postgresql.org/pub/source/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -osdependencies = [('openssl-devel', 'libssl-dev')] - -dependencies = [ - ('Java', '1.8', '', SYSTEM), - ('libreadline', '8.0'), - ('zlib', '1.2.11'), -] - -builddependencies = [ - ('binutils', '2.34'), - ('Bison', '3.6.4'), - ('flex', '2.6.4'), - ('Perl', '5.32.0'), -] - -configopts = 'LDFLAGS="$LDFLAGS -lpthread"' - -sanity_check_paths = { - 'files': ['bin/psql'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/PyCUDA/PyCUDA-2020.1-GCCcore-10.3.0.Python-3.8.5.eb b/Golden_Repo/p/PyCUDA/PyCUDA-2020.1-GCCcore-10.3.0.Python-3.8.5.eb deleted file mode 100644 index a3bd72c714b5ea2d11dec2a87d649f98773faca3..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyCUDA/PyCUDA-2020.1-GCCcore-10.3.0.Python-3.8.5.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyCUDA' -version = '2020.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pypi.python.org/pypi/pycuda/' -description = """ -PyCUDA lets you access Nvidia‘s CUDA parallel computation API from Python. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_LOWER_SOURCE] - -builddependencies = [ - ('binutils', '2.36.1') -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('Boost.Python', '1.74.0', '-nompi'), - ('CUDA', '11.3', '', SYSTEM), -] - -local_prebuildopts = "./configure.py --cuda-root=$EBROOTCUDA --boost-inc-dir=$EBROOTBOOST/include/boost/ " -local_prebuildopts += "--boost-lib-dir=$EBROOTBOOST/lib/ --no-use-shipped-boost " -local_prebuildopts += "--boost-python-libname=boost_python38 && " - -use_pip = True -sanity_pip_check = True - -exts_default_options = { - 'source_urls': [PYPI_LOWER_SOURCE], - 'source_tmpl': SOURCELOWER_TAR_GZ, -} - -exts_list = [ - ('pytools', '2020.4.4', { - 'checksums': ['3645ed839cf4d79cb4bf030f37ddaeecd7fe5e2d6698438cc36c24a1d5168809'], - }), - (name, version, { - 'prebuildopts': local_prebuildopts, - 'use_pip': False, - 'checksums': ['effa3b99b55af67f3afba9b0d1b64b4a0add4dd6a33bdd6786df1aa4cc8761a5'], - }), -] - -moduleclass = 'lang' diff --git a/Golden_Repo/p/PyCUDA/PyCUDA-2020.1-GCCcore-9.3.0.Python-3.8.5.eb b/Golden_Repo/p/PyCUDA/PyCUDA-2020.1-GCCcore-9.3.0.Python-3.8.5.eb deleted file mode 100644 index bc5baf32c7650dcc907b88f9c5491049de33438b..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyCUDA/PyCUDA-2020.1-GCCcore-9.3.0.Python-3.8.5.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyCUDA' -version = '2020.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pypi.python.org/pypi/pycuda/' -description = """ -PyCUDA lets you access Nvidia‘s CUDA parallel computation API from Python. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_LOWER_SOURCE] - -builddependencies = [ - ('binutils', '2.34') -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('Boost.Python', '1.74.0', '-nompi'), - ('CUDA', '11.0', '', SYSTEM), -] - -local_prebuildopts = "./configure.py --cuda-root=$EBROOTCUDA --boost-inc-dir=$EBROOTBOOST/include/boost/ " -local_prebuildopts += "--boost-lib-dir=$EBROOTBOOST/lib/ --no-use-shipped-boost " -local_prebuildopts += "--boost-python-libname=boost_python38 && " - -use_pip = True -sanity_pip_check = True - -exts_default_options = { - 'source_urls': [PYPI_LOWER_SOURCE], - 'source_tmpl': SOURCELOWER_TAR_GZ, -} - -exts_list = [ - ('pytools', '2020.4.4', { - 'checksums': ['3645ed839cf4d79cb4bf030f37ddaeecd7fe5e2d6698438cc36c24a1d5168809'], - }), - (name, version, { - 'prebuildopts': local_prebuildopts, - 'use_pip': False, - 'checksums': ['effa3b99b55af67f3afba9b0d1b64b4a0add4dd6a33bdd6786df1aa4cc8761a5'], - }), -] - -moduleclass = 'lang' diff --git a/Golden_Repo/p/PyCairo/PyCairo-1.18.2-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/PyCairo/PyCairo-1.18.2-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 9c805c4826c98d404e9acbebe6ecfe8c22098aed..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyCairo/PyCairo-1.18.2-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'PyCairo' -version = '1.18.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://cairographics.org/pycairo/' -description = """Python bindings for the cairo library""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': 
'10.3.0'} - -source_urls = ['https://github.com/pygobject/pycairo/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -download_dep_fail = True - -builddependencies = [ - ('binutils', '2.36.1'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('cairo', '1.17.2'), -] - -# PyGTK needs PyCairo installed by pip -use_pip = True -sanity_pip_check = True - -# Don't build a wheel or the pkg-cfg file won't be installed -installopts = '--no-binary=%(namelower)s' - -options = {'modulename': 'cairo'} - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages', 'lib64/python%(pyshortver)s/site-packages'], -} - -sanity_check_paths = { - 'files': ['%s/%s.%s' % (p, n, e) - for (p, e) in [('include/pycairo', 'h'), ('lib/pkgconfig', 'pc')] for n in ['py3cairo']], - 'dirs': ['lib/python%(pyshortver)s/site-packages/cairo'], -} - -options = {'modulename': 'cairo'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyCairo/PyCairo-1.18.2-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/PyCairo/PyCairo-1.18.2-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index a728a40dc2e7f436f686d483e56f0586c716227b..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyCairo/PyCairo-1.18.2-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'PyCairo' -version = '1.18.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://cairographics.org/pycairo/' -description = """Python bindings for the cairo library""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/pygobject/pycairo/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -download_dep_fail = True - -builddependencies = [ - ('binutils', '2.34'), - ('Coreutils', '8.32'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('cairo', '1.17.2'), -] - -# PyGTK needs PyCairo installed by pip -use_pip = True -sanity_pip_check = True - -# Don't build a wheel or the pkg-cfg file won't be installed -installopts = '--no-binary=%(namelower)s' - -options = {'modulename': 'cairo'} - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages', 'lib64/python%(pyshortver)s/site-packages'], -} - -sanity_check_paths = { - 'files': ['%s/%s.%s' % (p, n, e) - for (p, e) in [('include/pycairo', 'h'), ('lib/pkgconfig', 'pc')] for n in ['py3cairo']], - 'dirs': ['lib/python%(pyshortver)s/site-packages/cairo'], -} - -options = {'modulename': 'cairo'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyFerret/Faddpath b/Golden_Repo/p/PyFerret/Faddpath deleted file mode 100755 index e97792348b685397f6c75bcffc2aa697be7749ea..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyFerret/Faddpath +++ /dev/null @@ -1,11 +0,0 @@ -# Wrapper to compensate for the lack of function definition (defined when sourcing a PyFerret script) -#!/bin/sh -if [ -n "$*" ] -then - export FER_GO="$FER_GO $*" - export FER_DATA="$FER_DATA $*" - export FER_DESCR="$FER_DESCR $*" - export FER_GRIDS="$FER_GRIDS $*" -else - echo " Usage: Faddpath new_directory_1 ..." 
-fi diff --git a/Golden_Repo/p/PyFerret/PyFerret-7.6.3-gpsmkl-2020-Python-3.8.5.eb b/Golden_Repo/p/PyFerret/PyFerret-7.6.3-gpsmkl-2020-Python-3.8.5.eb deleted file mode 100644 index c3fc983f42e8f61935e007745a9616463676a521..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyFerret/PyFerret-7.6.3-gpsmkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,96 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PyFerret' -version = '7.6.3' -local_dataset_ver = '7.6' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://ferret.pmel.noaa.gov/' -description = '''PyFerret is an interactive computer visualization and analysis environment -designed to meet the needs of oceanographers and meteorologists analyzing large and complex gridded data sets.''' - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': False} - -source_urls = [ - 'https://github.com/NOAA-PMEL/PyFerret/archive/', - 'https://github.com/NOAA-PMEL/FerretDatasets/archive/' -] -sources = [ - 'v%(version)s.tar.gz', - 'v7.6.tar.gz' -] - -checksums = [ - ('sha256', 'f062c20bedf64713f57c3d45aaeb63eb9b4d3ba622e5c5e4a99e61fdf08d2224'), - ('sha256', 'b2fef758ec1817c1c19e6225857ca3a82c727d209ed7fd4697d45c5533bb2c72'), -] - -patches = [ - ('pyferret', 'bin'), - ('Faddpath', 'bin'), - ('configure_pyferret-7.6.3_stage2020.patch') -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('HDF5', '1.10.6'), - ('zlib', '1.2.11'), - ('cURL', '7.71.1'), - ('Pango', '1.44.7'), - ('libpng', '1.6.37'), - ('cairo', '1.17.2'), - ('freetype', '2.10.1'), - ('fontconfig', '2.13.92'), - ('pixman', '0.40.0'), - ('GLib', '2.64.4'), - ('PyQt5', '5.15.1', versionsuffix), -] - -maxparallel = 1 # yes! - -skipsteps = ['configure'] -start_dir = '%(builddir)s/%(name)s-%(version)s' - -prebuildopts = ['export FER_SRC_PREFIX=%(builddir)s/%(name)s-%(version)s && '] - -preinstallopts = ['export FER_DIR=%(installdir)s && '] - -modextrapaths = { - 'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages', - 'LD_LIBRARY_PATH': 'lib/python%(pyshortver)s/site-packages/pyferret', -} - -modextravars = { - 'FER_DIR': '%(installdir)s', - 'FER_DSETS': '%(installdir)s/datasets', - # 'FER_WEB_BROWSER': 'firefox', # Probably nobody should use this - 'FER_DATA_THREDDS': 'http://ferret.pmel.noaa.gov/geoide/geoIDECleanCatalog.xml %(installdir)s/datasets', - 'FER_DATA': '. %(installdir)s/datasets/data %(installdir)s/go %(installdir)s/examples', - 'FER_DESCR': '. %(installdir)s/datasets/descr', - 'FER_GRIDS': '. %(installdir)s/datasets/grids', - 'FER_GO': '. %(installdir)s/go %(installdir)s/examples %(installdir)s/contrib', - 'FER_EXTERNAL_FUNCTIONS': '%(installdir)s/ext_func/libs', - 'PYFER_EXTERNAL_FUNCTIONS': '%(installdir)s/ext_func/pylibs', - 'FER_PALETTE': '. 
%(installdir)s/ppl', - 'SPECTRA': '%(installdir)s/ppl', - 'FER_FONTS': '%(installdir)s/ppl/fonts', - 'PLOTFONTS': '%(installdir)s/ppl/fonts', - 'FER_LIBS': '%(installdir)s/lib', - 'FER_DAT': '%(installdir)s', -} - -postinstallcmds = [ - 'chmod +x %(installdir)s/bin/pyferret', - 'mkdir %(installdir)s/datasets', - 'mv %%(builddir)s/FerretDatasets-%s/data %%(installdir)s/datasets' % local_dataset_ver, - 'mv %%(builddir)s/FerretDatasets-%s/descr %%(installdir)s/datasets' % local_dataset_ver, - 'mv %%(builddir)s/FerretDatasets-%s/grids %%(installdir)s/datasets' % local_dataset_ver, -] - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyFerret/PyFerret-7.6.3-gpsmkl-2021-Python-3.8.5.eb b/Golden_Repo/p/PyFerret/PyFerret-7.6.3-gpsmkl-2021-Python-3.8.5.eb deleted file mode 100644 index fd0949da5f7eb6605dd14b6dfa3de233c0b1a2a3..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyFerret/PyFerret-7.6.3-gpsmkl-2021-Python-3.8.5.eb +++ /dev/null @@ -1,98 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'PyFerret' -version = '7.6.3' -local_dataset_ver = '7.6' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://ferret.pmel.noaa.gov/' -description = '''PyFerret is an interactive computer visualization and analysis environment -designed to meet the needs of oceanographers and meteorologists analyzing large and complex gridded data sets.''' - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'usempi': False} - -source_urls = [ - 'https://github.com/NOAA-PMEL/PyFerret/archive/', - 'https://github.com/NOAA-PMEL/FerretDatasets/archive/' -] -sources = [ - 'v%(version)s.tar.gz', - 'v7.6.tar.gz' -] - -checksums = [ - ('sha256', 'f062c20bedf64713f57c3d45aaeb63eb9b4d3ba622e5c5e4a99e61fdf08d2224'), - ('sha256', 'b2fef758ec1817c1c19e6225857ca3a82c727d209ed7fd4697d45c5533bb2c72'), -] - -patches = [ - ('pyferret', 'bin'), - ('Faddpath', 'bin'), - ('configure_pyferret-7.6.3_stage2021.patch') -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('netCDF', '4.7.4'), - ('netCDF-Fortran', '4.5.3'), - ('HDF5', '1.10.6'), - ('zlib', '1.2.11'), - ('cURL', '7.71.1'), - ('Pango', '1.44.7'), - ('libpng', '1.6.37'), - ('cairo', '1.17.2'), - ('freetype', '2.10.1'), - ('fontconfig', '2.13.92'), - ('pixman', '0.40.0'), - ('GLib', '2.64.4'), - ('PyQt5', '5.15.1', versionsuffix), -] - -maxparallel = 1 # yes! - -skipsteps = ['configure'] -start_dir = '%(builddir)s/%(name)s-%(version)s' - -prebuildopts = [( - 'export FER_SRC_PREFIX=%(builddir)s/%(name)s-%(version)s && ' -)] - -preinstallopts = ['export FER_DIR=%(installdir)s && '] - -modextrapaths = { - 'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages', - 'LD_LIBRARY_PATH': 'lib/python%(pyshortver)s/site-packages/pyferret', -} - -modextravars = { - 'FER_DIR': '%(installdir)s', - 'FER_DSETS': '%(installdir)s/datasets', - # 'FER_WEB_BROWSER': 'firefox', # Probably nobody should use this - 'FER_DATA_THREDDS': 'http://ferret.pmel.noaa.gov/geoide/geoIDECleanCatalog.xml %(installdir)s/datasets', - 'FER_DATA': '. %(installdir)s/datasets/data %(installdir)s/go %(installdir)s/examples', - 'FER_DESCR': '. %(installdir)s/datasets/descr', - 'FER_GRIDS': '. %(installdir)s/datasets/grids', - 'FER_GO': '. %(installdir)s/go %(installdir)s/examples %(installdir)s/contrib', - 'FER_EXTERNAL_FUNCTIONS': '%(installdir)s/ext_func/libs', - 'PYFER_EXTERNAL_FUNCTIONS': '%(installdir)s/ext_func/pylibs', - 'FER_PALETTE': '. 
%(installdir)s/ppl', - 'SPECTRA': '%(installdir)s/ppl', - 'FER_FONTS': '%(installdir)s/ppl/fonts', - 'PLOTFONTS': '%(installdir)s/ppl/fonts', - 'FER_LIBS': '%(installdir)s/lib', - 'FER_DAT': '%(installdir)s', -} - -postinstallcmds = [ - 'chmod +x %(installdir)s/bin/pyferret', - 'mkdir %(installdir)s/datasets', - 'mv %%(builddir)s/FerretDatasets-%s/data %%(installdir)s/datasets' % local_dataset_ver, - 'mv %%(builddir)s/FerretDatasets-%s/descr %%(installdir)s/datasets' % local_dataset_ver, - 'mv %%(builddir)s/FerretDatasets-%s/grids %%(installdir)s/datasets' % local_dataset_ver, -] - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyFerret/configure_pyferret-7.6.3_stage2020.patch b/Golden_Repo/p/PyFerret/configure_pyferret-7.6.3_stage2020.patch deleted file mode 100644 index 39846aaef0874e38f911531151609f56ef3b72e4..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyFerret/configure_pyferret-7.6.3_stage2020.patch +++ /dev/null @@ -1,438 +0,0 @@ -diff -Naur PyFerret-7.6.3.orig/external_functions/ef_utility/site_specific.mk PyFerret-7.6.3/external_functions/ef_utility/site_specific.mk ---- PyFerret-7.6.3.orig/external_functions/ef_utility/site_specific.mk 1970-01-01 01:00:00.000000000 +0100 -+++ PyFerret-7.6.3/external_functions/ef_utility/site_specific.mk 2020-12-17 15:22:43.154040000 +0100 -@@ -0,0 +1,75 @@ -+## Site-dependent definitions included in external function Makefiles -+## of an installed PyFerret directory. -+ -+## ========================= -+## Machine type for which to build Ferret/PyFerret -+## x86_64-linux for 64-bit RHEL -+## x86_64-linux-gnu for 64-bit Ubuntu and many "free" Linux systems -+## i386-linux for 32-bit RHEL -+## i386-linux-gnu for 32-bit Ubuntu and many "free" Linux systems -+## intel-mac for Max OSX -+## This value is used to determine which platform_specific.mk -+## file to include in the Makefiles. -+## ========================= -+# BUILDTYPE = $(HOSTTYPE) -+BUILDTYPE = x86_64-linux -+# BUILDTYPE = x86_64-linux-gnu -+# BUILDTYPE = i386-linux -+# BUILDTYPE = i386-linux-gnu -+# BUILDTYPE = intel-mac -+ -+## ========================= -+## INSTALL_FER_DIR and PYTHON_EXE are only used to construct -+## the location of pyferret library. The library should be -+## (for either 32-bit or 64-bit Linux) -+## $(INSTALL_FER_DIR)/lib/$(PYTHON_EXE)/site-package/pyferret/libpyferret.so -+## or possibly (for 64-bit Linux only) -+## $(INSTALL_FER_DIR)/lib64/$(PYTHON_EXE)/site-package/pyferret/libpyferret.so -+## -+## PyFerret installation directory, usually just $(FER_DIR) -+## Must be $(FER_DIR) when building pyferret from source. -+## (This file is also found in PyFerret installations, thus the option.) -+## ========================= -+# INSTALL_FER_DIR = $(HOME)/PyFerret -+INSTALL_FER_DIR = $(FER_DIR) -+ -+## ========================= -+## C and Fortran compilers to use. The construct "$(shell which gcc)" assigns -+## the response to "which gcc" run from a Bourne shell (such as bash). -+## When compiling for Mac OS X, one may wish to use clang instead of gcc. -+## If you wish to use values already defined in you shell environment when -+## you run make, comment out all definitions of CC and FC (do NOT leave blank). -+## ========================= -+# CC = $(shell which clang) -+CC = $(shell which gcc) -+FC = $(shell which gfortran) -+ -+## ========================= -+## Linker used to generate executables and shared-object libraries. 
-+## Normally the Fortran compiler is used which then calls the actual -+## linker with appropriate flags and system-level Fortran libraries. -+## The construct "$(shell which gfortran)" assigns the response to -+## "which gfortran" run from a Bourne shell (such as bash). -+## If you wish to use a value already defined in you shell environment when -+## you run make, comment out all definitions of LD (do NOT leave blank). -+## ========================= -+LD = $(shell which gfortran) -+ -+## ========================= -+## Python version used by PyFerret -+## ========================= -+# PYTHON_EXE = python2.6 -+# PYTHON_EXE = python2.7 -+# PYTHON_EXE = python3.6 -+# PYTHON_EXE = python3.7 -+PYTHON_EXE = python3.8 -+ -+## ========================= -+## FER_LOCAL_EXTFCNS is the directory in which to install -+## the Ferret Fortran external functions. The example -+## functions that come with the PyFerret installation are -+## installed in $(INSTALL_FER_DIR)/ext_func/pylibs -+## ========================= -+FER_LOCAL_EXTFCNS = $(INSTALL_FER_DIR)/ext_func/pylibs -+ -+## -diff -Naur PyFerret-7.6.3.orig/Makefile PyFerret-7.6.3/Makefile ---- PyFerret-7.6.3.orig/Makefile 2020-09-28 21:10:48.000000000 +0200 -+++ PyFerret-7.6.3/Makefile 2020-12-17 14:37:08.501952000 +0100 -@@ -74,10 +74,12 @@ - export CFLAGS="$(CFLAGS) -DNDEBUG -O" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -94,10 +96,12 @@ - export CFLAGS="$(CFLAGS) -DNDEBUG -O" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -117,10 +121,12 @@ - export CFLAGS="$(CFLAGS) -UNDEBUG -O0 -g" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -137,10 +143,12 @@ - export CFLAGS="$(CFLAGS) -UNDEBUG -O0 -g" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -185,10 +193,12 @@ - export CFLAGS="$(CFLAGS) -O" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - 
export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -diff -Naur PyFerret-7.6.3.orig/platform_specific.mk.x86_64-linux PyFerret-7.6.3/platform_specific.mk.x86_64-linux ---- PyFerret-7.6.3.orig/platform_specific.mk.x86_64-linux 2020-09-28 21:10:48.000000000 +0200 -+++ PyFerret-7.6.3/platform_specific.mk.x86_64-linux 2020-12-17 14:36:27.694118000 +0100 -@@ -17,6 +17,7 @@ - # Include directories - # - NETCDF_INCLUDE = -I$(NETCDF_LIBDIR)/../include -+ NETCDFFORTRAN_INCLUDE = -I$(NETCDFFORTRAN_LIBDIR)/../include - - ifeq ($(strip $(HDF5_LIBDIR)),) - HDF5_INCLUDE = -@@ -36,6 +37,12 @@ - PIXMAN_INCLUDE = -I$(PIXMAN_LIBDIR)/../include - endif - -+ifeq ($(strip $(HARFBUZZ_LIBDIR)),) -+ HARFBUZZ_INCLUDE = -I/usr/include/harfbuzz -+else -+ HARFBUZZ_INCLUDE = -I$(HARFBUZZ_LIBDIR)/../include/harfbuzz -+endif -+ - ifeq ($(strip $(PANGO_LIBDIR)),) - PANGO_INCLUDE = -I/usr/include/pango-1.0 - else -@@ -64,9 +71,11 @@ - -I$(DIR_PREFIX)/pyfermod \ - -I$(DIR_PREFIX)/external_functions/ef_utility \ - $(NETCDF_INCLUDE) \ -+ $(NETCDFFORTRAN_INCLUDE) \ - $(HDF5_INCLUDE) \ - $(CAIRO_INCLUDE) \ - $(PIXMAN_INCLUDE) \ -+ $(HARFBUZZ_INCLUDE) \ - $(PANGO_INCLUDE) \ - $(GLIB2_INCLUDE) - -diff -Naur PyFerret-7.6.3.orig/setup.py PyFerret-7.6.3/setup.py ---- PyFerret-7.6.3.orig/setup.py 2020-09-28 21:10:48.000000000 +0200 -+++ PyFerret-7.6.3/setup.py 2020-12-17 14:36:06.465376000 +0100 -@@ -30,6 +30,13 @@ - if not netcdf_libdir: - raise ValueError("Environment variable NETCDF_LIBDIR is not defined") - -+# NETCDFFORTAN_LIBDIR must be given, either for the static library or the shared-object library -+netcdffortran_libdir = os.getenv("NETCDFFORTRAN_LIBDIR") -+if netcdffortran_libdir: -+ netcdffortran_libdir = netcdffortran_libdir.strip() -+else: -+ netcdffortran_libdir = netcdf_libdir -+ - # HDF5_LIBDIR is only given if the HDF5 and NetCDF libraries are to be statically linked - hdf5_libdir = os.getenv("HDF5_LIBDIR") - if hdf5_libdir: -@@ -50,6 +57,11 @@ - if pixman_libdir: - pixman_libdir = pixman_libdir.strip() - -+# HARFBUZZ gives a non-standard location of the harfbuzz libraries -+harfbuzz_libdir = os.getenv("HARFBUZZ_LIBDIR") -+if harfbuzz_libdir: -+ harfbuzz_libdir = harfbuzz_libdir.strip() -+ - # PANGO_LIBDIR gives a non-standard location of the pango libraries - pango_libdir = os.getenv("PANGO_LIBDIR") - if pango_libdir: -@@ -66,6 +78,8 @@ - - # The list of additional directories to examine for libraries - libdir_list = [ "lib", netcdf_libdir, ] -+if netcdffortran_libdir: -+ libdir_list.append(netcdffortran_libdir) - if hdf5_libdir: - libdir_list.append(hdf5_libdir) - if sz_libdir: -@@ -74,6 +88,8 @@ - libdir_list.append(cairo_libdir) - if pixman_libdir: - libdir_list.append(pixman_libdir) -+if harfbuzz_libdir: -+ libdir_list.append(harfbuzz_libdir) - if pango_libdir: - libdir_list.append(pango_libdir) - libdir_list.append(python_libdir) -@@ -103,7 +119,7 @@ - # The hdf5 libraries are only used to resolve netcdf library function - # calls when statically linking in the netcdf libraries. 
- if hdf5_libdir: -- netcdff_lib = os.path.join(netcdf_libdir, "libnetcdff.a") -+ netcdff_lib = os.path.join(netcdffortran_libdir, "libnetcdff.a") - addn_link_args.append(netcdff_lib) - netcdf_lib = os.path.join(netcdf_libdir, "libnetcdf.a") - addn_link_args.append(netcdf_lib) -diff -Naur PyFerret-7.6.3.orig/site_specific.mk PyFerret-7.6.3/site_specific.mk ---- PyFerret-7.6.3.orig/site_specific.mk 1970-01-01 01:00:00.000000000 +0100 -+++ PyFerret-7.6.3/site_specific.mk 2020-12-17 14:41:42.140897914 +0100 -@@ -0,0 +1,195 @@ -+## Site-dependent definitions included in Makefiles -+ -+## !!! Also verify the values in external_functions/ef_utility/site_specific.mk !!! -+ -+## ========================= -+## Full path name of the directory containing this file (the ferret root directory). -+## Do not use $(shell pwd) since this is included in Makefiles in other directories. -+## ========================= -+# DIR_PREFIX = $(HOME)/build/pyferret_dev -+# DIR_PREFIX = $(HOME)/svn/pyferret -+DIR_PREFIX = $(FER_SRC_PREFIX) -+ -+## ========================= -+## Installation directory for built PyFerret. -+## Using the "install" Makefile target creates a generic pyferret-*.tar.gz file -+## and then extracts it to create a PyFerret installation at this location. -+## ========================= -+# INSTALL_FER_DIR = $(HOME)/ferret_distributions/rhel6_64 -+INSTALL_FER_DIR = $(FER_DIR) -+ -+## ========================= -+## Machine type for which to build Ferret/PyFerret -+## x86_64-linux for 64-bit RHEL -+## x86_64-linux-gnu for 64-bit Ubuntu and many "free" Linux systems -+## i386-linux for 32-bit RHEL -+## i386-linux-gnu for 32-bit Ubuntu and many "free" Linux systems -+## intel-mac for Mac OSX -+## ========================= -+# BUILDTYPE = $(HOSTTYPE) -+BUILDTYPE = x86_64-linux -+# BUILDTYPE = x86_64-linux-gnu -+# BUILDTYPE = i386-linux -+# BUILDTYPE = i386-linux-gnu -+# BUILDTYPE = intel-mac -+ -+## ========================= -+## C and Fortran compilers to use. The construct "$(shell which gcc)" assigns -+## the response to "which gcc" run from a Bourne shell (such as bash). -+## When compiling for Mac OS X, one may wish to use clang instead of gcc. -+## If you wish to use values already defined in you shell environment when -+## you run make, comment out all definitions of CC and FC (do NOT leave blank). -+## ========================= -+# CC = $(shell which clang) -+CC = $(shell which gcc) -+FC = $(shell which gfortran) -+ -+## ========================= -+## Python executable to invoke for build and install. -+## ========================= -+# PYTHON_EXE = python2.6 -+# PYTHON_EXE = python2.7 -+# PYTHON_EXE = python3.6 -+# PYTHON_EXE = python3.7 -+PYTHON_EXE = python3.8 -+ -+## ========================= -+## Full path to the python include files directory. -+## Should not need any modifications. -+## ========================= -+PYTHONINCDIR := $(shell $(PYTHON_EXE) -c "from __future__ import print_function ; import distutils.sysconfig; print(distutils.sysconfig.get_python_inc())") -+ -+## ========================= -+## If given and not empty, the full path name to the gfortran library to use. -+## This is primarily used for the intel-mac build. -+## The given scripts (commented out) should provide the correct value for the gfortran libraries. -+## If empty or not given, the gfortran library is linked in using the "-lgfortran" flag. 
-+## ========================= -+# GFORTRAN_LIB = $(shell $(FC) --print-file-name=libgfortran.dylib) -+# GFORTRAN_LIB = $(shell $(FC) --print-file-name=libgfortran.a) -+GFORTRAN_LIB = -+ -+## ========================= -+## Directory containing the Cairo static libraries (v1.12 or later). -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the Cairo static libraries found under this directory will be used. -+## If empty or not given, the system's Cairo shared libraries will be used. -+## ========================= -+# CAIRO_LIBDIR = /usr/local/cairo/lib -+# CAIRO_LIBDIR = $(HOME)/.local/lib -+# CAIRO_LIBDIR = /usr/local/lib -+# CAIRO_LIBDIR = /p/software/juwels/stages/Devel-2020/software/cairo/1.17.2-GCCcore-9.3.0/lib64 -+CAIRO_LIBDIR = $(EBROOTCAIRO)/lib -+ -+## ========================= -+## Directory containing the Pixman static libraries used by the above Cairo static libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the Pixman-1 static libraries found in this directory will be used. -+## If empty or not given, the system's Pixman-1 shared library will be used. -+## This value should be empty or not given if CAIRO_LIBDIR is empty or not given. -+## ========================= -+# PIXMAN_LIBDIR = /usr/local/pixman/lib -+# PIXMAN_LIBDIR = $(HOME)/.local/lib -+# PIXMAN_LIBDIR = /usr/local/lib -+# PIXMAN_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/pixman/0.40.0-GCCcore-9.3.0/lib -+PIXMAN_LIBDIR = $(EBROOTPIXMAN)/lib -+ -+## ========================= -+## Directory containing the HarfBuzz shared libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the HarfBuzz shared libraries under this directory are used. -+## If empty or not given, the system's HarfBuzz shared libraries are used. -+## This value should be empty or not given if CAIRO_LIBDIR is empty or not given. -+## ========================= -+#HARFBUZZ_LIBDIR = /p/software/juwels/stages/Devel-2020/software/HarfBuzz/2.6.7-GCCcore-9.3.0/lib64 -+HARFBUZZ_LIBDIR = $(EBROOTHARFBUZZ)/lib -+ -+## ========================= -+## Directory containing the Pango shared libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the Pango shared libraries under this directory are used. -+## If empty or not given, the system's Pango shared libraries are used. -+## This value should be empty or not given if CAIRO_LIBDIR is empty or not given. -+## ========================= -+# PANGO_LIBDIR = $(HOME)/.local/lib -+# PANGO_LIBDIR = /usr/local/lib -+# PANGO_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/Pango/1.44.7-GCCcore-9.3.0/lib -+PANGO_LIBDIR = $(EBROOTPANGO)/lib -+ -+## ========================= -+## Library directory containing a "glib-2.0" subdirectory with GLib-2.0 include file(s) -+## (yes, a little unusual) used by the above Pango shared libraries. -+## An "include" sibling directory containing a "glib-2.0" subdirectory with more -+## GLib-2.0 include files is assumed to exist. -+## If given and not empty, GLib-2.0 include files under this directory are used. -+## If empty or not given, the system's GLib-2.0 shared libraries are used. -+## This value should be empty or not given if PANGO_LIBDIR is empty or not given. 
-+## ========================= -+# GLIB2_LIBDIR = $(HOME)/.local/lib -+# GLIB2_LIBDIR = /usr/local/lib -+# GLIB2_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/GLib/2.64.4-GCCcore-9.3.0/lib -+GLIB2_LIBDIR = $(EBROOTGLIB)/lib -+ -+## ========================= -+## Directory containing the HDF5 static libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, HDF5 and NetCDF static libraries will be used. -+## If empty or not given, NetCDF shared libraries (which specify the HDF5 and -+## compression libraries required) will be used. -+## ========================= -+# HDF5_LIBDIR = /usr/local/hdf5/lib -+# HDF5_LIBDIR = $(HOME)/.local/lib -+# HDF5_LIBDIR = /usr/local/lib -+# HDF5_LIBDIR = /usr/lib64 -+# HDF5_LIBDIR = /usr/lib -+# HDF5_LIBDIR = /p/software/juwels/stages/Devel-2020/software/HDF5/1.10.6-gompi-2020/lib64 -+# HDF5_LIBDIR = $(EBROOTHDF5)/lib -+ -+## ========================= -+## Location of the SZ compression static library. -+## This value should be given only if the SZ compression library was used in -+## building the HDF5 library, and the NetCDF and HDF5 static libraries are being -+## used (HDF5_LIBDIR is given and not empty). -+## If given and not empty, the SZ compression static library is used. -+## If empty or not given, the SZ compression library will not be used -+## (which is what you want if the HDF5 libraries were built using the Z compression library). -+## ========================= -+# SZ_LIBDIR = $(HOME)/.local/lib -+# SZ_LIBDIR = /usr/local/lib -+# SZ_LIBDIR = /usr/lib64 -+# SZ_LIBDIR = /usr/lib -+# SZ_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/Szip/2.1.1-GCCcore-9.3.0/lib -+SZ_LIBDIR = $(EBROOTSZIP)/lib -+ -+## ========================= -+## Location of the NetCDF libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If HDF5_LIBDIR is given and not empty, the static libraries will be used -+## (along with the HDF5 static libraries). -+## If HDF5_LIBDIR is empty or not given, NetCDF shared libraries will be used. -+## ========================= -+# NETCDF_LIBDIR = /usr/local/netcdf/lib -+# NETCDF_LIBDIR = $(HOME)/.local/lib -+# NETCDF_LIBDIR = /usr/local/lib -+# NETCDF_LIBDIR = /usr/lib64 -+# NETCDF_LIBDIR = /usr/lib -+# NETCDF_LIBDIR = /p/software/juwels/stages/Devel-2020/software/netCDF/4.7.4-gompi-2020/lib64 -+NETCDF_LIBDIR = $(EBROOTNETCDF)/lib64 -+ -+## ========================= -+## Location of the NetCDF fortran libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If HDF5_LIBDIR is given and not empty, the static libraries will be used -+## (along with the HDF5 static libraries). -+## If HDF5_LIBDIR is empty or not given, NetCDF fortran shared libraries will be used. 
-+## If empty or not given, the netcdf fortran shared libraries will be searched for in NETCDF_LIBDIR -+## ========================= -+# NETCDFFORTRAN_LIBDIR = /usr/local/netcdf/lib -+# NETCDFFORTRAN_LIBDIR = $(HOME)/.local/lib -+# NETCDFFORTRAN_LIBDIR = /usr/local/lib -+# NETCDFFORTRAN_LIBDIR = /usr/lib64 -+# NETCDFFORTRAN_LIBDIR = /usr/lib -+# NETCDFFORTRAN_LIBDIR = /p/software/juwels/stages/Devel-2020/software/netCDF-Fortran/4.5.3-gompi-2020/lib64 -+NETCDFFORTRAN_LIBDIR = $(EBROOTNETCDFMINFORTRAN)/lib -+ -+## diff --git a/Golden_Repo/p/PyFerret/configure_pyferret-7.6.3_stage2021.patch b/Golden_Repo/p/PyFerret/configure_pyferret-7.6.3_stage2021.patch deleted file mode 100644 index 553506e07a417830f81db8044692d911fa744602..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyFerret/configure_pyferret-7.6.3_stage2021.patch +++ /dev/null @@ -1,438 +0,0 @@ -diff -Naur PyFerret-7.6.3.orig/external_functions/ef_utility/site_specific.mk PyFerret-7.6.3/external_functions/ef_utility/site_specific.mk ---- PyFerret-7.6.3.orig/external_functions/ef_utility/site_specific.mk 1970-01-01 01:00:00.000000000 +0100 -+++ PyFerret-7.6.3/external_functions/ef_utility/site_specific.mk 2020-12-17 15:22:43.154040000 +0100 -@@ -0,0 +1,75 @@ -+## Site-dependent definitions included in external function Makefiles -+## of an installed PyFerret directory. -+ -+## ========================= -+## Machine type for which to build Ferret/PyFerret -+## x86_64-linux for 64-bit RHEL -+## x86_64-linux-gnu for 64-bit Ubuntu and many "free" Linux systems -+## i386-linux for 32-bit RHEL -+## i386-linux-gnu for 32-bit Ubuntu and many "free" Linux systems -+## intel-mac for Max OSX -+## This value is used to determine which platform_specific.mk -+## file to include in the Makefiles. -+## ========================= -+# BUILDTYPE = $(HOSTTYPE) -+BUILDTYPE = x86_64-linux -+# BUILDTYPE = x86_64-linux-gnu -+# BUILDTYPE = i386-linux -+# BUILDTYPE = i386-linux-gnu -+# BUILDTYPE = intel-mac -+ -+## ========================= -+## INSTALL_FER_DIR and PYTHON_EXE are only used to construct -+## the location of pyferret library. The library should be -+## (for either 32-bit or 64-bit Linux) -+## $(INSTALL_FER_DIR)/lib/$(PYTHON_EXE)/site-package/pyferret/libpyferret.so -+## or possibly (for 64-bit Linux only) -+## $(INSTALL_FER_DIR)/lib64/$(PYTHON_EXE)/site-package/pyferret/libpyferret.so -+## -+## PyFerret installation directory, usually just $(FER_DIR) -+## Must be $(FER_DIR) when building pyferret from source. -+## (This file is also found in PyFerret installations, thus the option.) -+## ========================= -+# INSTALL_FER_DIR = $(HOME)/PyFerret -+INSTALL_FER_DIR = $(FER_DIR) -+ -+## ========================= -+## C and Fortran compilers to use. The construct "$(shell which gcc)" assigns -+## the response to "which gcc" run from a Bourne shell (such as bash). -+## When compiling for Mac OS X, one may wish to use clang instead of gcc. -+## If you wish to use values already defined in you shell environment when -+## you run make, comment out all definitions of CC and FC (do NOT leave blank). -+## ========================= -+# CC = $(shell which clang) -+CC = $(shell which gcc) -+FC = $(shell which gfortran) -w -fallow-argument-mismatch -fallow-invalid-boz -+ -+## ========================= -+## Linker used to generate executables and shared-object libraries. -+## Normally the Fortran compiler is used which then calls the actual -+## linker with appropriate flags and system-level Fortran libraries. 
-+## The construct "$(shell which gfortran)" assigns the response to -+## "which gfortran" run from a Bourne shell (such as bash). -+## If you wish to use a value already defined in you shell environment when -+## you run make, comment out all definitions of LD (do NOT leave blank). -+## ========================= -+LD = $(shell which gfortran) -+ -+## ========================= -+## Python version used by PyFerret -+## ========================= -+# PYTHON_EXE = python2.6 -+# PYTHON_EXE = python2.7 -+# PYTHON_EXE = python3.6 -+# PYTHON_EXE = python3.7 -+PYTHON_EXE = python3.8 -+ -+## ========================= -+## FER_LOCAL_EXTFCNS is the directory in which to install -+## the Ferret Fortran external functions. The example -+## functions that come with the PyFerret installation are -+## installed in $(INSTALL_FER_DIR)/ext_func/pylibs -+## ========================= -+FER_LOCAL_EXTFCNS = $(INSTALL_FER_DIR)/ext_func/pylibs -+ -+## -diff -Naur PyFerret-7.6.3.orig/Makefile PyFerret-7.6.3/Makefile ---- PyFerret-7.6.3.orig/Makefile 2020-09-28 21:10:48.000000000 +0200 -+++ PyFerret-7.6.3/Makefile 2020-12-17 14:37:08.501952000 +0100 -@@ -74,10 +74,12 @@ - export CFLAGS="$(CFLAGS) -DNDEBUG -O" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -94,10 +96,12 @@ - export CFLAGS="$(CFLAGS) -DNDEBUG -O" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -117,10 +121,12 @@ - export CFLAGS="$(CFLAGS) -UNDEBUG -O0 -g" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -137,10 +143,12 @@ - export CFLAGS="$(CFLAGS) -UNDEBUG -O0 -g" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -@@ -185,10 +193,12 @@ - export CFLAGS="$(CFLAGS) -O" ; \ - export BUILDTYPE=$(BUILDTYPE) ; \ - export NETCDF_LIBDIR=$(NETCDF_LIBDIR) ; \ -+ export NETCDFFORTRAN_LIBDIR=$(NETCDFFORTRAN_LIBDIR) ; \ - export HDF5_LIBDIR=$(HDF5_LIBDIR) ; \ - 
export SZ_LIBDIR=$(SZ_LIBDIR) ; \ - export CAIRO_LIBDIR=$(CAIRO_LIBDIR) ; \ - export PIXMAN_LIBDIR=$(PIXMAN_LIBDIR) ; \ -+ export HARFBUZZ_LIBDIR=$(HARFBUZZ_LIBDIR) ; \ - export PANGO_LIBDIR=$(PANGO_LIBDIR) ; \ - export GLIB2_LIBDIR=$(GLIB2_LIBDIR) ; \ - export GFORTRAN_LIB=$(GFORTRAN_LIB) ; \ -diff -Naur PyFerret-7.6.3.orig/platform_specific.mk.x86_64-linux PyFerret-7.6.3/platform_specific.mk.x86_64-linux ---- PyFerret-7.6.3.orig/platform_specific.mk.x86_64-linux 2020-09-28 21:10:48.000000000 +0200 -+++ PyFerret-7.6.3/platform_specific.mk.x86_64-linux 2020-12-17 14:36:27.694118000 +0100 -@@ -17,6 +17,7 @@ - # Include directories - # - NETCDF_INCLUDE = -I$(NETCDF_LIBDIR)/../include -+ NETCDFFORTRAN_INCLUDE = -I$(NETCDFFORTRAN_LIBDIR)/../include - - ifeq ($(strip $(HDF5_LIBDIR)),) - HDF5_INCLUDE = -@@ -36,6 +37,12 @@ - PIXMAN_INCLUDE = -I$(PIXMAN_LIBDIR)/../include - endif - -+ifeq ($(strip $(HARFBUZZ_LIBDIR)),) -+ HARFBUZZ_INCLUDE = -I/usr/include/harfbuzz -+else -+ HARFBUZZ_INCLUDE = -I$(HARFBUZZ_LIBDIR)/../include/harfbuzz -+endif -+ - ifeq ($(strip $(PANGO_LIBDIR)),) - PANGO_INCLUDE = -I/usr/include/pango-1.0 - else -@@ -64,9 +71,11 @@ - -I$(DIR_PREFIX)/pyfermod \ - -I$(DIR_PREFIX)/external_functions/ef_utility \ - $(NETCDF_INCLUDE) \ -+ $(NETCDFFORTRAN_INCLUDE) \ - $(HDF5_INCLUDE) \ - $(CAIRO_INCLUDE) \ - $(PIXMAN_INCLUDE) \ -+ $(HARFBUZZ_INCLUDE) \ - $(PANGO_INCLUDE) \ - $(GLIB2_INCLUDE) - -diff -Naur PyFerret-7.6.3.orig/setup.py PyFerret-7.6.3/setup.py ---- PyFerret-7.6.3.orig/setup.py 2020-09-28 21:10:48.000000000 +0200 -+++ PyFerret-7.6.3/setup.py 2020-12-17 14:36:06.465376000 +0100 -@@ -30,6 +30,13 @@ - if not netcdf_libdir: - raise ValueError("Environment variable NETCDF_LIBDIR is not defined") - -+# NETCDFFORTAN_LIBDIR must be given, either for the static library or the shared-object library -+netcdffortran_libdir = os.getenv("NETCDFFORTRAN_LIBDIR") -+if netcdffortran_libdir: -+ netcdffortran_libdir = netcdffortran_libdir.strip() -+else: -+ netcdffortran_libdir = netcdf_libdir -+ - # HDF5_LIBDIR is only given if the HDF5 and NetCDF libraries are to be statically linked - hdf5_libdir = os.getenv("HDF5_LIBDIR") - if hdf5_libdir: -@@ -50,6 +57,11 @@ - if pixman_libdir: - pixman_libdir = pixman_libdir.strip() - -+# HARFBUZZ gives a non-standard location of the harfbuzz libraries -+harfbuzz_libdir = os.getenv("HARFBUZZ_LIBDIR") -+if harfbuzz_libdir: -+ harfbuzz_libdir = harfbuzz_libdir.strip() -+ - # PANGO_LIBDIR gives a non-standard location of the pango libraries - pango_libdir = os.getenv("PANGO_LIBDIR") - if pango_libdir: -@@ -66,6 +78,8 @@ - - # The list of additional directories to examine for libraries - libdir_list = [ "lib", netcdf_libdir, ] -+if netcdffortran_libdir: -+ libdir_list.append(netcdffortran_libdir) - if hdf5_libdir: - libdir_list.append(hdf5_libdir) - if sz_libdir: -@@ -74,6 +88,8 @@ - libdir_list.append(cairo_libdir) - if pixman_libdir: - libdir_list.append(pixman_libdir) -+if harfbuzz_libdir: -+ libdir_list.append(harfbuzz_libdir) - if pango_libdir: - libdir_list.append(pango_libdir) - libdir_list.append(python_libdir) -@@ -103,7 +119,7 @@ - # The hdf5 libraries are only used to resolve netcdf library function - # calls when statically linking in the netcdf libraries. 
- if hdf5_libdir: -- netcdff_lib = os.path.join(netcdf_libdir, "libnetcdff.a") -+ netcdff_lib = os.path.join(netcdffortran_libdir, "libnetcdff.a") - addn_link_args.append(netcdff_lib) - netcdf_lib = os.path.join(netcdf_libdir, "libnetcdf.a") - addn_link_args.append(netcdf_lib) -diff -Naur PyFerret-7.6.3.orig/site_specific.mk PyFerret-7.6.3/site_specific.mk ---- PyFerret-7.6.3.orig/site_specific.mk 1970-01-01 01:00:00.000000000 +0100 -+++ PyFerret-7.6.3/site_specific.mk 2020-12-17 14:41:42.140897914 +0100 -@@ -0,0 +1,195 @@ -+## Site-dependent definitions included in Makefiles -+ -+## !!! Also verify the values in external_functions/ef_utility/site_specific.mk !!! -+ -+## ========================= -+## Full path name of the directory containing this file (the ferret root directory). -+## Do not use $(shell pwd) since this is included in Makefiles in other directories. -+## ========================= -+# DIR_PREFIX = $(HOME)/build/pyferret_dev -+# DIR_PREFIX = $(HOME)/svn/pyferret -+DIR_PREFIX = $(FER_SRC_PREFIX) -+ -+## ========================= -+## Installation directory for built PyFerret. -+## Using the "install" Makefile target creates a generic pyferret-*.tar.gz file -+## and then extracts it to create a PyFerret installation at this location. -+## ========================= -+# INSTALL_FER_DIR = $(HOME)/ferret_distributions/rhel6_64 -+INSTALL_FER_DIR = $(FER_DIR) -+ -+## ========================= -+## Machine type for which to build Ferret/PyFerret -+## x86_64-linux for 64-bit RHEL -+## x86_64-linux-gnu for 64-bit Ubuntu and many "free" Linux systems -+## i386-linux for 32-bit RHEL -+## i386-linux-gnu for 32-bit Ubuntu and many "free" Linux systems -+## intel-mac for Mac OSX -+## ========================= -+# BUILDTYPE = $(HOSTTYPE) -+BUILDTYPE = x86_64-linux -+# BUILDTYPE = x86_64-linux-gnu -+# BUILDTYPE = i386-linux -+# BUILDTYPE = i386-linux-gnu -+# BUILDTYPE = intel-mac -+ -+## ========================= -+## C and Fortran compilers to use. The construct "$(shell which gcc)" assigns -+## the response to "which gcc" run from a Bourne shell (such as bash). -+## When compiling for Mac OS X, one may wish to use clang instead of gcc. -+## If you wish to use values already defined in you shell environment when -+## you run make, comment out all definitions of CC and FC (do NOT leave blank). -+## ========================= -+# CC = $(shell which clang) -+CC = $(shell which gcc) -+FC = $(shell which gfortran) -w -fallow-argument-mismatch -fallow-invalid-boz -+ -+## ========================= -+## Python executable to invoke for build and install. -+## ========================= -+# PYTHON_EXE = python2.6 -+# PYTHON_EXE = python2.7 -+# PYTHON_EXE = python3.6 -+# PYTHON_EXE = python3.7 -+PYTHON_EXE = python3.8 -+ -+## ========================= -+## Full path to the python include files directory. -+## Should not need any modifications. -+## ========================= -+PYTHONINCDIR := $(shell $(PYTHON_EXE) -c "from __future__ import print_function ; import distutils.sysconfig; print(distutils.sysconfig.get_python_inc())") -+ -+## ========================= -+## If given and not empty, the full path name to the gfortran library to use. -+## This is primarily used for the intel-mac build. -+## The given scripts (commented out) should provide the correct value for the gfortran libraries. -+## If empty or not given, the gfortran library is linked in using the "-lgfortran" flag. 
-+## ========================= -+# GFORTRAN_LIB = $(shell $(FC) --print-file-name=libgfortran.dylib) -+# GFORTRAN_LIB = $(shell $(FC) --print-file-name=libgfortran.a) -+GFORTRAN_LIB = -+ -+## ========================= -+## Directory containing the Cairo static libraries (v1.12 or later). -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the Cairo static libraries found under this directory will be used. -+## If empty or not given, the system's Cairo shared libraries will be used. -+## ========================= -+# CAIRO_LIBDIR = /usr/local/cairo/lib -+# CAIRO_LIBDIR = $(HOME)/.local/lib -+# CAIRO_LIBDIR = /usr/local/lib -+# CAIRO_LIBDIR = /p/software/juwels/stages/Devel-2020/software/cairo/1.17.2-GCCcore-9.3.0/lib64 -+CAIRO_LIBDIR = $(EBROOTCAIRO)/lib -+ -+## ========================= -+## Directory containing the Pixman static libraries used by the above Cairo static libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the Pixman-1 static libraries found in this directory will be used. -+## If empty or not given, the system's Pixman-1 shared library will be used. -+## This value should be empty or not given if CAIRO_LIBDIR is empty or not given. -+## ========================= -+# PIXMAN_LIBDIR = /usr/local/pixman/lib -+# PIXMAN_LIBDIR = $(HOME)/.local/lib -+# PIXMAN_LIBDIR = /usr/local/lib -+# PIXMAN_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/pixman/0.40.0-GCCcore-9.3.0/lib -+PIXMAN_LIBDIR = $(EBROOTPIXMAN)/lib -+ -+## ========================= -+## Directory containing the HarfBuzz shared libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the HarfBuzz shared libraries under this directory are used. -+## If empty or not given, the system's HarfBuzz shared libraries are used. -+## This value should be empty or not given if CAIRO_LIBDIR is empty or not given. -+## ========================= -+#HARFBUZZ_LIBDIR = /p/software/juwels/stages/Devel-2020/software/HarfBuzz/2.6.7-GCCcore-9.3.0/lib64 -+HARFBUZZ_LIBDIR = $(EBROOTHARFBUZZ)/lib -+ -+## ========================= -+## Directory containing the Pango shared libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, the Pango shared libraries under this directory are used. -+## If empty or not given, the system's Pango shared libraries are used. -+## This value should be empty or not given if CAIRO_LIBDIR is empty or not given. -+## ========================= -+# PANGO_LIBDIR = $(HOME)/.local/lib -+# PANGO_LIBDIR = /usr/local/lib -+# PANGO_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/Pango/1.44.7-GCCcore-9.3.0/lib -+PANGO_LIBDIR = $(EBROOTPANGO)/lib -+ -+## ========================= -+## Library directory containing a "glib-2.0" subdirectory with GLib-2.0 include file(s) -+## (yes, a little unusual) used by the above Pango shared libraries. -+## An "include" sibling directory containing a "glib-2.0" subdirectory with more -+## GLib-2.0 include files is assumed to exist. -+## If given and not empty, GLib-2.0 include files under this directory are used. -+## If empty or not given, the system's GLib-2.0 shared libraries are used. -+## This value should be empty or not given if PANGO_LIBDIR is empty or not given. 
-+## ========================= -+# GLIB2_LIBDIR = $(HOME)/.local/lib -+# GLIB2_LIBDIR = /usr/local/lib -+# GLIB2_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/GLib/2.64.4-GCCcore-9.3.0/lib -+GLIB2_LIBDIR = $(EBROOTGLIB)/lib -+ -+## ========================= -+## Directory containing the HDF5 static libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If given and not empty, HDF5 and NetCDF static libraries will be used. -+## If empty or not given, NetCDF shared libraries (which specify the HDF5 and -+## compression libraries required) will be used. -+## ========================= -+# HDF5_LIBDIR = /usr/local/hdf5/lib -+# HDF5_LIBDIR = $(HOME)/.local/lib -+# HDF5_LIBDIR = /usr/local/lib -+# HDF5_LIBDIR = /usr/lib64 -+# HDF5_LIBDIR = /usr/lib -+# HDF5_LIBDIR = /p/software/juwels/stages/Devel-2020/software/HDF5/1.10.6-gompi-2020/lib64 -+# HDF5_LIBDIR = $(EBROOTHDF5)/lib -+ -+## ========================= -+## Location of the SZ compression static library. -+## This value should be given only if the SZ compression library was used in -+## building the HDF5 library, and the NetCDF and HDF5 static libraries are being -+## used (HDF5_LIBDIR is given and not empty). -+## If given and not empty, the SZ compression static library is used. -+## If empty or not given, the SZ compression library will not be used -+## (which is what you want if the HDF5 libraries were built using the Z compression library). -+## ========================= -+# SZ_LIBDIR = $(HOME)/.local/lib -+# SZ_LIBDIR = /usr/local/lib -+# SZ_LIBDIR = /usr/lib64 -+# SZ_LIBDIR = /usr/lib -+# SZ_LIBDIR = /gpfs/software/juwels/stages/Devel-2020/software/Szip/2.1.1-GCCcore-9.3.0/lib -+SZ_LIBDIR = $(EBROOTSZIP)/lib -+ -+## ========================= -+## Location of the NetCDF libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If HDF5_LIBDIR is given and not empty, the static libraries will be used -+## (along with the HDF5 static libraries). -+## If HDF5_LIBDIR is empty or not given, NetCDF shared libraries will be used. -+## ========================= -+# NETCDF_LIBDIR = /usr/local/netcdf/lib -+# NETCDF_LIBDIR = $(HOME)/.local/lib -+# NETCDF_LIBDIR = /usr/local/lib -+# NETCDF_LIBDIR = /usr/lib64 -+# NETCDF_LIBDIR = /usr/lib -+# NETCDF_LIBDIR = /p/software/juwels/stages/Devel-2020/software/netCDF/4.7.4-gompi-2020/lib64 -+NETCDF_LIBDIR = $(EBROOTNETCDF)/lib64 -+ -+## ========================= -+## Location of the NetCDF fortran libraries. -+## Include files are assumed to be located in an "include" sibling directory. -+## If HDF5_LIBDIR is given and not empty, the static libraries will be used -+## (along with the HDF5 static libraries). -+## If HDF5_LIBDIR is empty or not given, NetCDF fortran shared libraries will be used. 
-+## If empty or not given, the netcdf fortran shared libraries will be searched for in NETCDF_LIBDIR -+## ========================= -+# NETCDFFORTRAN_LIBDIR = /usr/local/netcdf/lib -+# NETCDFFORTRAN_LIBDIR = $(HOME)/.local/lib -+# NETCDFFORTRAN_LIBDIR = /usr/local/lib -+# NETCDFFORTRAN_LIBDIR = /usr/lib64 -+# NETCDFFORTRAN_LIBDIR = /usr/lib -+# NETCDFFORTRAN_LIBDIR = /p/software/juwels/stages/Devel-2020/software/netCDF-Fortran/4.5.3-gompi-2020/lib64 -+NETCDFFORTRAN_LIBDIR = $(EBROOTNETCDFMINFORTRAN)/lib -+ -+## diff --git a/Golden_Repo/p/PyFerret/pyferret b/Golden_Repo/p/PyFerret/pyferret deleted file mode 100755 index 46e83c36bf729bd15b7417fc107aa2cafdc9a41d..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyFerret/pyferret +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -if [ -z "${FER_LIBS}" ]; then - echo "**ERROR: Ferret environment variables are not defined" - exit 1 -fi -python -i -c "import sys; import pyferret; (errval, errmsg) = pyferret.init(sys.argv[1:], True)" $* diff --git a/Golden_Repo/p/PyGObject/PyGObject-3.34.0-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/PyGObject/PyGObject-3.34.0-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index b04f37dba2539da5223044cdc5aba7653ab15841..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyGObject/PyGObject-3.34.0-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'PyGObject' -version = '3.34.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pygobject.readthedocs.io' -description = """PyGObject is a Python package which provides bindings for GObject based -libraries such as GTK, GStreamer, WebKitGTK, GLib, GIO and many more. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['2acb0daf2b3a23a90f52066cc23d1053339fee2f5f7f4275f8baa3704ae0c543'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('GLib', '2.64.4'), - ('GObject-Introspection', '1.64.1'), - ('PyCairo', '1.18.2', versionsuffix), - ('libffi', '3.3'), -] - -use_pip = True -sanity_pip_check = True -download_dep_fail = True - -# Don't build a wheel or the pkg-cfg file won't be installed -installopts = '--no-binary=%(namelower)s --no-use-pep517' - -sanity_check_paths = { - 'files': ['include/pygobject-%(version_major)s.0/pygobject.h'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -options = {'modulename': 'gi'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyGObject/PyGObject-3.34.0-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/PyGObject/PyGObject-3.34.0-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index e64e5fedbd5150883504dd7e8ea1f4c47167b8dc..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyGObject/PyGObject-3.34.0-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'PyGObject' -version = '3.34.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pygobject.readthedocs.io' -description = """PyGObject is a Python package which provides bindings for GObject based -libraries such as GTK, GStreamer, WebKitGTK, GLib, GIO and many more. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['2acb0daf2b3a23a90f52066cc23d1053339fee2f5f7f4275f8baa3704ae0c543'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('GLib', '2.64.4'), - ('GObject-Introspection', '1.64.1'), - ('PyCairo', '1.18.2', versionsuffix), - ('libffi', '3.3'), -] - -use_pip = True -sanity_pip_check = True -download_dep_fail = True - -# Don't build a wheel or the pkg-cfg file won't be installed -installopts = '--no-binary=%(namelower)s --no-use-pep517' - -sanity_check_paths = { - 'files': ['include/pygobject-%(version_major)s.0/pygobject.h'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -options = {'modulename': 'gi'} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyOpenCL/PyOpenCL-2020.2.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/p/PyOpenCL/PyOpenCL-2020.2.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 4074f67a9fcd9af8591a6dd0115ecf54d7f91aad..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyOpenCL/PyOpenCL-2020.2.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'Bundle' - -name = 'PyOpenCL' -version = '2020.2.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = "https://pypi.python.org/pypi/pyopencl/" -description = """Python wrapper for OpenCL. PyOpenCL lets you access GPUs and other massively parallel compute devices -from Python. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') - -dependencies = [ - ('Python', '3.8.5'), - ('pybind11', '2.5.0', versionsuffix), - ('SciPy-Stack', '2020', versionsuffix), - ('CUDA', '11.0', '', ('dummy', 'dummy')), -] - -exts_download_dep_fail = True - -exts_list = [ - ('pytools', '2020.4', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pytools'], - }), - ('pyopencl', version, { - 'source_urls': ['https://pypi.python.org/packages/source/p/pyopencl'], - }), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'lib' diff --git a/Golden_Repo/p/PyQt5/PyQt5-5.15.1-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/PyQt5/PyQt5-5.15.1-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 98e22b1b49a4ce3a0d990eb301e0a84ff60771ac..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyQt5/PyQt5-5.15.1-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,107 +0,0 @@ -easyblock = 'Bundle' - -name = 'PyQt5' -version = '5.15.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://www.riverbankcomputing.com/software/pyqt' -description = """PyQt5 is a set of Python bindings for v5 of the Qt application framework from The Qt Company. 
-This bundle includes PyQtWebEngine, a set of Python bindings for The Qt Company’s Qt WebEngine framework.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'cstd': 'c++11'} - -builddependencies = [('binutils', '2.36.1')] -dependencies = [ - ('Python', '3.8.5'), - ('Qt5', '5.14.2'), -] - -default_easyblock = 'PythonPackage' - -local_pylibdir = '%(installdir)s/lib/python%(pyshortver)s/site-packages' - -local_pyqt5_sip_install = " ".join([ - "sip-install", - "--verbose", - "--confirm-license", - "--target-dir " + local_pylibdir, -]) - -local_pyqtweb_configopts = " ".join([ - "configure.py", - "--verbose", - "--destdir=%s/PyQt5" % local_pylibdir, - "--apidir=%(installdir)s/qsci", - "--pyqt-sipdir=%(builddir)s/PyQt5-%(version)s/sip", - "--no-stubs", - "--no-dist-info", -]) - -local_setup_env = "export PATH=%(installdir)s/bin:$PATH && " -local_setup_env += "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " -local_sipver = '5.4.0' -components = [ - ('SIP', local_sipver, { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCELOWER_TAR_GZ], - 'checksums': ['4282ab45948674f5ef74278a8e70d1302f65c95b519a0af19409002f5715d641'], - 'start_dir': 'sip-%s' % local_sipver, - 'use_pip': True, - 'options': {'modulename': 'PyQt5.sip'}, - }), - ('PyQt-builder', '1.5.0', { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['11bbe26e8e3d5ffec6d2ef2f50596b1670eb2d8b49aee0f859821922d8282841'], - 'start_dir': 'PyQt-builder-%(version)s', - 'use_pip': True, - }), - ('PyQt5_sip', '12.8.1', { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['30e944db9abee9cc757aea16906d4198129558533eb7fadbe48c5da2bd18e0bd'], - 'start_dir': 'PyQt5_sip-%(version)s', - 'use_pip': True, - }), - (name, version, { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['d9a76b850246d08da9863189ecb98f6c2aa9b4d97a3e85e29330a264aed0f9a1'], - 'easyblock': 'Binary', - 'start_dir': '%(name)s-%(version)s', - 'skipsteps': ['configure', 'build'], - 'install_cmd': local_setup_env + local_pyqt5_sip_install, - }), - ('PyQtWebEngine', version, { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['f0ca7915ee206ba5d703168c6ca40b0aad62c67360328fae4af5359cdbcee439'], - 'easyblock': 'ConfigureMakePythonPackage', - 'start_dir': '%(name)s-%(version)s', - 'preconfigopts': local_setup_env, - 'configopts': local_pyqtweb_configopts, - 'options': {'modulename': 'PyQt5.QtWebEngine'}, - }), -] - -sanity_check_paths = { - 'files': ['bin/pyqt-bundle', 'bin/sip-build', 'bin/sip-install', 'bin/sip5'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -sanity_check_commands = [ - "python -c 'import PyQt5.QtCore'", - "python -c 'import PyQt5.pyrcc'", - "python -c 'import PyQt5.uic'", - "sip5 --help", -] - -modextrapaths = { - 'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages', - 'QT_INSTALL_DATA': 'qsci', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyQt5/PyQt5-5.15.1-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/PyQt5/PyQt5-5.15.1-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index ddaa5035a5416cca27ee648d030df5066e3f19ab..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyQt5/PyQt5-5.15.1-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,107 +0,0 @@ -easyblock = 'Bundle' - -name = 'PyQt5' -version = '5.15.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 
'https://www.riverbankcomputing.com/software/pyqt' -description = """PyQt5 is a set of Python bindings for v5 of the Qt application framework from The Qt Company. -This bundle includes PyQtWebEngine, a set of Python bindings for The Qt Company’s Qt WebEngine framework.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'cstd': 'c++11'} - -builddependencies = [('binutils', '2.34')] -dependencies = [ - ('Python', '3.8.5'), - ('Qt5', '5.14.2'), -] - -default_easyblock = 'PythonPackage' - -local_pylibdir = '%(installdir)s/lib/python%(pyshortver)s/site-packages' - -local_pyqt5_sip_install = " ".join([ - "sip-install", - "--verbose", - "--confirm-license", - "--target-dir " + local_pylibdir, -]) - -local_pyqtweb_configopts = " ".join([ - "configure.py", - "--verbose", - "--destdir=%s/PyQt5" % local_pylibdir, - "--apidir=%(installdir)s/qsci", - "--pyqt-sipdir=%(builddir)s/PyQt5-%(version)s/sip", - "--no-stubs", - "--no-dist-info", -]) - -local_setup_env = "export PATH=%(installdir)s/bin:$PATH && " -local_setup_env += "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " -local_sipver = '5.4.0' -components = [ - ('SIP', local_sipver, { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCELOWER_TAR_GZ], - 'checksums': ['4282ab45948674f5ef74278a8e70d1302f65c95b519a0af19409002f5715d641'], - 'start_dir': 'sip-%s' % local_sipver, - 'use_pip': True, - 'options': {'modulename': 'PyQt5.sip'}, - }), - ('PyQt-builder', '1.5.0', { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['11bbe26e8e3d5ffec6d2ef2f50596b1670eb2d8b49aee0f859821922d8282841'], - 'start_dir': 'PyQt-builder-%(version)s', - 'use_pip': True, - }), - ('PyQt5_sip', '12.8.1', { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['30e944db9abee9cc757aea16906d4198129558533eb7fadbe48c5da2bd18e0bd'], - 'start_dir': 'PyQt5_sip-%(version)s', - 'use_pip': True, - }), - (name, version, { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['d9a76b850246d08da9863189ecb98f6c2aa9b4d97a3e85e29330a264aed0f9a1'], - 'easyblock': 'Binary', - 'start_dir': '%(name)s-%(version)s', - 'skipsteps': ['configure', 'build'], - 'install_cmd': local_setup_env + local_pyqt5_sip_install, - }), - ('PyQtWebEngine', version, { - 'source_urls': [PYPI_SOURCE], - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['f0ca7915ee206ba5d703168c6ca40b0aad62c67360328fae4af5359cdbcee439'], - 'easyblock': 'ConfigureMakePythonPackage', - 'start_dir': '%(name)s-%(version)s', - 'preconfigopts': local_setup_env, - 'configopts': local_pyqtweb_configopts, - 'options': {'modulename': 'PyQt5.QtWebEngine'}, - }), -] - -sanity_check_paths = { - 'files': ['bin/pyqt-bundle', 'bin/sip-build', 'bin/sip-install', 'bin/sip5'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -sanity_check_commands = [ - "python -c 'import PyQt5.QtCore'", - "python -c 'import PyQt5.pyrcc'", - "python -c 'import PyQt5.uic'", - "sip5 --help", -] - -modextrapaths = { - 'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages', - 'QT_INSTALL_DATA': 'qsci', -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/PyQuil/PyQuil-2.27.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/p/PyQuil/PyQuil-2.27.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index 808eeb50afab84eb46296a5e58a29963c47c6cf1..0000000000000000000000000000000000000000 --- 
a/Golden_Repo/p/PyQuil/PyQuil-2.27.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,96 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyQuil' -version = '2.27.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/quantumlib/cirq' -description = """PyQuil is a library for generating and executing Quil programs on the Rigetti Forest platform.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', '-Python-%(pyver)s'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'use_pip_for_deps': False, - 'download_dep_fail': True, - 'sanity_pip_check': True, -} - -exts_list = [ - # testing - ('typing_extensions', '3.7.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c')]), - ])), - ('typed_ast', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a')]), - ])), - ('mypy_extensions', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8')]), - ])), - ('mypy', '0.790', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2b21ba45ad9ef2e2eb88ce4aeadd0112d0f5026418324176fd494a6824b74975')]), - ])), - ('requests-mock', '1.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e68f46844e4cee9d447150343c9ae875f99fa8037c6dcf5f15bf1fe9ab43d226')]), - ])), - ('pytest-cov', '2.10.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e')]), - ])), - ('pytest-timeout', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '20b3113cf6e4e80ce2d403b6fb56e9e1b871b510259206d40ff8d609f48bda76')]), - ])), - ('pytest-asyncio', '0.14.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700')]), - ])), - ('pytest-rerunfailures', '9.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1cb11a17fc121b3918414eb5eaf314ee325f2e693ac7cb3f6abf7560790827f2')]), - ])), - # pyquil application - ('python-rapidjson', '0.9.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ad80bd7e4bb15d9705227630037a433e2e2a7982b54b51de2ebabdd1611394a1')]), - ('modulename', 'rapidjson'), - ])), - ('ruamel.yaml', '0.16.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '412a6f5cfdc0525dee6a27c08f5415c7fd832a7afcb7a0ed7319628aed23d408')]), - ])), - ('ruamel.yaml.clib', '0.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cee86ecc893a6a8ecaa7c6a9c2d06f75f614176210d78a5f155f8e78d6989509')]), - ('modulename', 'ruamel.yaml'), # fake to make sanitycheck shut-up - ])), - ('rpcq', '3.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd4695048f874aa255764336fa1965f6fc1ea001a31e28681bbbef708cac531e1')]), - ])), - ('antlr4-python3-runtime', '4.7.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b')]), - ('modulename', 'antlr4'), - ])), - ('lark', '0.11.1', 
dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f2c6ed79ae128a89714bbaa4a6ecb61b6eec84d1b5d63b9195ad461762f96298')]), - ])), - ('pyquil', version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9082bbb58d1c104303d5e3890c81063f802edb5570c47d9f0176ae9df06f4081')]), - ])), - # addon - ('contextvars', '2.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2341042e1c03a271813e07dba29b6b60fa85c1005ea5ed1638a076cf50b4d625')]), - ])), - ('immutables', '0.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '63023fa0cceedc62e0d1535cd4ca7a1f6df3120a6d8e5c34e89037402a6fd809')]), - ])), -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/p/PyQuil/PyQuil-2.27.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/p/PyQuil/PyQuil-2.27.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 1a850b9db0b4a67057f3f13c71afdee1b83053c7..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyQuil/PyQuil-2.27.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,94 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyQuil' -version = '2.27.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/quantumlib/cirq' -description = """PyQuil is a library for generating and executing Quil programs on the Rigetti Forest platform.""" - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', '-Python-%(pyver)s'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'use_pip_for_deps': False, - 'download_dep_fail': True, - 'sanity_pip_check': True, -} - -exts_list = [ - # testing - ('typing_extensions', '3.7.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c')]), - ])), - ('typed_ast', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a')]), - ])), - ('mypy_extensions', '0.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8')]), - ])), - ('mypy', '0.790', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2b21ba45ad9ef2e2eb88ce4aeadd0112d0f5026418324176fd494a6824b74975')]), - ])), - ('requests-mock', '1.8.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'e68f46844e4cee9d447150343c9ae875f99fa8037c6dcf5f15bf1fe9ab43d226')]), - ])), - ('pytest-cov', '2.10.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e')]), - ])), - ('pytest-timeout', '1.4.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '20b3113cf6e4e80ce2d403b6fb56e9e1b871b510259206d40ff8d609f48bda76')]), - ])), - ('pytest-asyncio', '0.14.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700')]), - ])), - ('pytest-rerunfailures', '9.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '1cb11a17fc121b3918414eb5eaf314ee325f2e693ac7cb3f6abf7560790827f2')]), - ])), - # pyquil application - ('python-rapidjson', '0.9.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'ad80bd7e4bb15d9705227630037a433e2e2a7982b54b51de2ebabdd1611394a1')]), - ('modulename', 'rapidjson'), - ])), - ('ruamel.yaml', '0.16.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '412a6f5cfdc0525dee6a27c08f5415c7fd832a7afcb7a0ed7319628aed23d408')]), - ])), - ('ruamel.yaml.clib', '0.1.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'cee86ecc893a6a8ecaa7c6a9c2d06f75f614176210d78a5f155f8e78d6989509')]), - ('modulename', 'ruamel.yaml'), # fake to make sanitycheck shut-up - ])), - ('rpcq', '3.7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd4695048f874aa255764336fa1965f6fc1ea001a31e28681bbbef708cac531e1')]), - ])), - ('antlr4-python3-runtime', '4.7.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b')]), - ('modulename', 'antlr4'), - ])), - ('lark', '0.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'f2c6ed79ae128a89714bbaa4a6ecb61b6eec84d1b5d63b9195ad461762f96298')]), - ])), - ('pyquil', version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9082bbb58d1c104303d5e3890c81063f802edb5570c47d9f0176ae9df06f4081')]), - ])), - # addon - ('contextvars', '2.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2341042e1c03a271813e07dba29b6b60fa85c1005ea5ed1638a076cf50b4d625')]), - ])), - ('immutables', '0.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '63023fa0cceedc62e0d1535cd4ca7a1f6df3120a6d8e5c34e89037402a6fd809')]), - ])), -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/p/PySCF/PySCF-1.7.1-GCC-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/PySCF/PySCF-1.7.1-GCC-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 744aa798afa8099bef83ae3f07fcd1e5a8df62bb..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PySCF/PySCF-1.7.1-GCC-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'CMakeMakeCp' - -name = 'PySCF' -version = '1.7.1' # last version which requires libxc<5 -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.pyscf.org' -description = "PySCF is an open-source collection of electronic structure modules powered by Python." 
- -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/pyscf/pyscf/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['23ccf6d8bb6a15fe7035e04e6ab00783a069bf38556873c71a6fb672b6159636'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('h5py', '2.10.0', '-serial' + versionsuffix), - ('qcint', '3.0.19', '', ('gcccoremkl', '9.3.0-2020.2.254')), - ('libxc', '4.3.4'), # no gcccore*-version available - only in toolchain GCC - ('XCFun', '20190127', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - -start_dir = 'pyscf/lib' - -separate_build_dir = True - -configopts = "-DBUILD_LIBCINT=OFF -DBUILD_LIBXC=OFF -DBUILD_XCFUN=OFF" - -prebuildopts = "export PYSCF_INC_DIR=$EBROOTQCINT/include:$EBROOTLIBXC/lib && " - -files_to_copy = ['pyscf'] - -sanity_check_paths = { - 'files': ['pyscf/__init__.py'], - 'dirs': ['pyscf/data', 'pyscf/lib'], -} - -sanity_check_commands = ["python -c 'import pyscf'"] - -modextrapaths = {'PYTHONPATH': ''} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PySCF/PySCF-1.7.1-GCC-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/PySCF/PySCF-1.7.1-GCC-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 545f9160eece8aeea351aa96833c01fe7476fe12..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PySCF/PySCF-1.7.1-GCC-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,51 +0,0 @@ -easyblock = 'CMakeMakeCp' - -name = 'PySCF' -version = '1.7.1' # last version which requires libxc<5 -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.pyscf.org' -description = "PySCF is an open-source collection of electronic structure modules powered by Python." 
- -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/pyscf/pyscf/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['23ccf6d8bb6a15fe7035e04e6ab00783a069bf38556873c71a6fb672b6159636'] - -builddependencies = [ - ('CMake', '3.18.0') -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('h5py', '2.10.0', '-serial' + versionsuffix), - ('qcint', '3.0.19', '', ('gcccoremkl', '9.3.0-2020.2.254')), - ('libxc', '4.3.4'), # no gcccore*-version available - only in toolchain GCC - ('XCFun', '20190127', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), -] - -start_dir = 'pyscf/lib' - -separate_build_dir = True - -configopts = "-DBUILD_LIBCINT=OFF -DBUILD_LIBXC=OFF -DBUILD_XCFUN=OFF" - -prebuildopts = "export PYSCF_INC_DIR=$EBROOTQCINT/include:$EBROOTLIBXC/lib && " - -files_to_copy = ['pyscf'] - -sanity_check_paths = { - 'files': ['pyscf/__init__.py'], - 'dirs': ['pyscf/data', 'pyscf/lib'], -} - -sanity_check_commands = ["python -c 'import pyscf'"] - -modextrapaths = {'PYTHONPATH': ''} - -moduleclass = 'chem' diff --git a/Golden_Repo/p/PyTorch/PyTorch-1.6.0_fix-test-dataloader-fixed-affinity.patch b/Golden_Repo/p/PyTorch/PyTorch-1.6.0_fix-test-dataloader-fixed-affinity.patch deleted file mode 100644 index 39e4d9883bdcafff1586aa278a08a41a8f786527..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyTorch/PyTorch-1.6.0_fix-test-dataloader-fixed-affinity.patch +++ /dev/null @@ -1,48 +0,0 @@ -From c7c7460fd3a49a9d289394b80d9ecf61898edf49 Mon Sep 17 00:00:00 2001 -From: Alexander Grund <alexander.grund@tu-dresden.de> -Date: Wed, 9 Sep 2020 08:47:03 +0200 -Subject: [PATCH] Choose test affinity based on current affinity - ---- - test/test_dataloader.py | 18 ++++++++++++------ - 1 file changed, 12 insertions(+), 6 deletions(-) - -diff --git a/test/test_dataloader.py b/test/test_dataloader.py -index ca0c9e6cb511f..745942bcf01f2 100644 ---- a/test/test_dataloader.py -+++ b/test/test_dataloader.py -@@ -2128,22 +2128,28 @@ def __iter__(self): - after = os.sched_getaffinity(0) - return iter(after) - -- --def worker_set_affinity(_): -- os.sched_setaffinity(0, [2]) -- -- - @unittest.skipIf( - not hasattr(os, 'sched_setaffinity'), - "os.sched_setaffinity is not available") - class TestSetAffinity(TestCase): - def test_set_affinity_in_worker_init(self): -+ # Query the current affinity mask to avoid setting a disallowed one -+ old_affinity = os.sched_getaffinity(0) -+ if not old_affinity: -+ self.skipTest("No affinity information") -+ # Choose any -+ expected_affinity = list(old_affinity)[-1] -+ -+ def worker_set_affinity(_): -+ os.sched_setaffinity(0, [expected_affinity]) -+ -+ - dataset = SetAffinityDataset() - - dataloader = torch.utils.data.DataLoader( - dataset, num_workers=2, worker_init_fn=worker_set_affinity) - for sample in dataloader: -- self.assertEqual(sample, [2]) -+ self.assertEqual(sample, [expected_affinity]) - - - diff --git a/Golden_Repo/p/PyTorch/PyTorch-1.7.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/p/PyTorch/PyTorch-1.7.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index 700cb585f01163563857f5495bcb70b6e26f4f0a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyTorch/PyTorch-1.7.0-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,287 +0,0 @@ -name = 'PyTorch' -version = '1.7.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 
'https://pytorch.org/' -description = """Tensors and Dynamic neural networks in Python with strong GPU acceleration. -PyTorch is a deep learning framework that puts Python first.""" - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} - -site_contacts = 'Alexandre Strube <a.strube@fz-juelich.de>' - -osdependencies = [OS_PKG_IBVERBS_DEV] -local_cudaver = '11.3' - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Ninja', '1.10.0'), - ('Python', '3.8.5'), - ('protobuf', '3.13.0'), - ('pybind11', '2.5.0', versionsuffix), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('MPFR', '4.1.0'), - ('GMP', '6.2.0'), - ('numactl', '2.0.13', '', SYSTEM), - ('FFmpeg', '4.3.1'), - ('cuDNN', '8.2.1.32', '-CUDA-%s' % local_cudaver, SYSTEM), - ('magma', '2.5.4'), - ('NCCL', '2.8.3-1', '-CUDA-%s' % local_cudaver), - ('LLVM', '10.0.1'), - ('libvpx', '1.9.0'), -] - -# default CUDA compute capabilities to use (override via --cuda-compute-capabilities) -cuda_compute_capabilities = ['7.0', '7.5', '8.0'] - -# PyTorch pulls in a bunch of submodules that are not releases. -# We download the submodule revisions from their repos. -# The list is generated by easybuild-framework/easybuild/scripts/createSubmoduleDeps.sh -local_extract_cmd_pattern = 'tar -C pytorch-%%(version)s/third_party/%s --strip-components=1 -xf %%s' - -source_urls = ['https://github.com/pytorch/pytorch/archive'] - -sources = [ - 'v%(version)s.tar.gz', # PyTorch - { - 'source_urls': ['https://github.com/Maratyszcza/FP16/archive'], - 'download_filename': '4dfe081cf6bcd15db339cf2680b9281b8451eeb3.tar.gz', - 'filename': 'FP16-20200514.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'FP16', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/FXdiv/archive'], - 'download_filename': 'b408327ac2a15ec3e43352421954f5b1967701d1.tar.gz', - 'filename': 'FXdiv-20200417.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'FXdiv', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/NNPACK/archive'], - 'download_filename': '24b55303f5cf65d75844714513a0d1b1409809bd.tar.gz', - 'filename': 'NNPACK-20191007.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'NNPACK', - }, - { - 'source_urls': ['https://github.com/pytorch/QNNPACK/archive'], - 'download_filename': '7d2a4e9931a82adc3814275b6219a03e24e36b4c.tar.gz', - 'filename': 'QNNPACK-20190828.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'QNNPACK', - }, - { - 'source_urls': ['https://github.com/google/XNNPACK/archive'], - 'download_filename': '1b354636b5942826547055252f3b359b54acff95.tar.gz', - 'filename': 'XNNPACK-20200323.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'XNNPACK', - }, - { - 'source_urls': ['https://github.com/google/benchmark/archive'], - 'download_filename': '505be96ab23056580a3a2315abba048f4428b04e.tar.gz', - 'filename': 'benchmark-20180606.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'benchmark', - }, - { - 'source_urls': ['https://github.com/pytorch/cpuinfo/archive'], - 'download_filename': '63b254577ed77a8004a9be6ac707f3dccc4e1fd9.tar.gz', - 'filename': 'cpuinfo-20200611.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'cpuinfo', - }, - { - 'source_urls': ['https://github.com/NVlabs/cub/archive'], - 'download_filename': 'd106ddb991a56c3df1b6d51b2409e36ba8181ce4.tar.gz', - 'filename': 'cub-20200512.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'cub', - }, - { - 'source_urls': ['https://github.com/eigenteam/eigen-git-mirror/archive'], - 'download_filename': 
'd41dc4dd74acce21fb210e7625d5d135751fa9e5.tar.gz', - 'filename': 'eigen-20190125.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'eigen', - }, - { - 'source_urls': ['https://github.com/pytorch/fbgemm/archive'], - 'download_filename': '1d710393d5b7588f5de3b83f51c22bbddf095229.tar.gz', - 'filename': 'fbgemm-20200914.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm', - }, - { - 'source_urls': ['https://github.com/asmjit/asmjit/archive'], - 'download_filename': '9057aa30b620f0662ff51e2230c126a345063064.tar.gz', - 'filename': 'asmjit-20200429.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm/third_party/asmjit', - }, - { - 'source_urls': ['https://github.com/pytorch/cpuinfo/archive'], - 'download_filename': 'd5e37adf1406cf899d7d9ec1d317c47506ccb970.tar.gz', - 'filename': 'cpuinfo-20190201.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm/third_party/cpuinfo', - }, - { - 'source_urls': ['https://github.com/google/googletest/archive'], - 'download_filename': '0fc5466dbb9e623029b1ada539717d10bd45e99e.tar.gz', - 'filename': 'googletest-20180925.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm/third_party/googletest', - }, - { - 'source_urls': ['https://github.com/fmtlib/fmt/archive'], - 'download_filename': 'cd4af11efc9c622896a3e4cb599fa28668ca3d05.tar.gz', - 'filename': 'fmt-20200806.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fmt', - }, - { - 'source_urls': ['https://github.com/houseroad/foxi/archive'], - 'download_filename': '4aba696ec8f31794fd42880346dc586486205e0a.tar.gz', - 'filename': 'foxi-20200922.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'foxi', - }, - { - 'source_urls': ['https://github.com/google/gemmlowp/archive'], - 'download_filename': '3fb5c176c17c765a3492cd2f0321b0dab712f350.tar.gz', - 'filename': 'gemmlowp-20181126.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'gemmlowp/gemmlowp', - }, - { - 'source_urls': ['https://github.com/facebookincubator/gloo/archive'], - 'download_filename': '3dc0328fe6a9d47bd47c0c6ca145a0d8a21845c6.tar.gz', - 'filename': 'gloo-20200918.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'gloo', - }, - { - 'source_urls': ['https://github.com/google/googletest/archive'], - 'download_filename': '2fe3bd994b3189899d93f1d5a881e725e046fdc2.tar.gz', - 'filename': 'googletest-20180831.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'googletest', - }, - { - 'source_urls': ['https://github.com/intel/ideep/archive'], - 'download_filename': 'ba885200dbbc1f144c7b58eba487378eb324f281.tar.gz', - 'filename': 'ideep-20200915.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'ideep', - }, - { - 'source_urls': ['https://github.com/intel/mkl-dnn/archive'], - 'download_filename': '5ef631a030a6f73131c77892041042805a06064f.tar.gz', - 'filename': 'mkl-dnn-20200909.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'ideep/mkl-dnn', - }, - { - 'source_urls': ['https://github.com/onnx/onnx/archive'], - 'download_filename': 'a82c6a7010e2e332d8f74ad5b0c726fd47c85376.tar.gz', - 'filename': 'onnx-20200531.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'onnx', - }, - { - 'source_urls': ['https://github.com/google/benchmark/archive'], - 'download_filename': 'e776aa0275e293707b6a0901e0e8d8a8a3679508.tar.gz', - 'filename': 'benchmark-20180525.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'onnx/third_party/benchmark', - }, - { - 'source_urls': ['https://github.com/onnx/onnx-tensorrt/archive'], - 'download_filename': 'c153211418a7c57ce071d9ce2a41f8d1c85a878f.tar.gz', - 
'filename': 'onnx-tensorrt-20190916.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'onnx-tensorrt', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/psimd/archive'], - 'download_filename': '072586a71b55b7f8c584153d223e95687148a900.tar.gz', - 'filename': 'psimd-20200517.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'psimd', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/pthreadpool/archive'], - 'download_filename': '029c88620802e1361ccf41d1970bd5b07fd6b7bb.tar.gz', - 'filename': 'pthreadpool-20200615.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'pthreadpool', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/PeachPy/archive'], - 'download_filename': '07d8fde8ac45d7705129475c0f94ed8925b93473.tar.gz', - 'filename': 'PeachPy-20180218.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'python-peachpy', - }, - { - 'source_urls': ['https://github.com/shibatch/sleef/archive'], - 'download_filename': '7f523de651585fe25cade462efccca647dcc8d02.tar.gz', - 'filename': 'sleef-20190730.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'sleef', - }, - { - 'source_urls': ['https://github.com/01org/tbb/archive'], - 'download_filename': 'a51a90bc609bb73db8ea13841b5cf7aa4344d4a9.tar.gz', - 'filename': 'tbb-20181009.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tbb', - }, - { - 'source_urls': ['https://github.com/pytorch/tensorpipe/archive'], - 'download_filename': '95ff9319161fcdb3c674d2bb63fac3e94095b343.tar.gz', - 'filename': 'tensorpipe-20200928.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe', - }, - { - 'source_urls': ['https://github.com/google/googletest/archive'], - 'download_filename': '2fe3bd994b3189899d93f1d5a881e725e046fdc2.tar.gz', - 'filename': 'googletest-20180831.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe/third_party/googletest', - }, - { - 'source_urls': ['https://github.com/google/libnop/archive'], - 'download_filename': 'aa95422ea8c409e3f078d2ee7708a5f59a8b9fa2.tar.gz', - 'filename': 'libnop-20200723.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe/third_party/libnop', - }, - { - 'source_urls': ['https://github.com/libuv/libuv/archive'], - 'download_filename': '02a9e1be252b623ee032a3137c0b0c94afbe6809.tar.gz', - 'filename': 'libuv-20200419.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe/third_party/libuv', - }, -] -patches = [ - 'PyTorch-1.6.0_fix-test-dataloader-fixed-affinity.patch', - 'PyTorch-1.7.0_fix_test_DistributedDataParallel.patch', - 'PyTorch-1.7.0_fix_typing_python38.patch', - 'PyTorch-1.7.0_fix_remove_backports.patch', -] - -excluded_tests = { - '': [ - # Fails on HDFML. Probably needs a GPU and a network - needs NCCL - 'distributed/test_distributed_fork', - # Fails on HDFML. - 'distributed/test_distributed_spawn', - # Fails on JUWELS (cluster) with SIGXCPU and on JUWELSBOOSTER - 'test_foreach', - 'test_xnnpack_integration', - # Fails on JUSUF - 'distributed/rpc/test_process_group_agent', - 'distributed/rpc/test_tensorpipe_agent', - 'test_autograd', - 'test_jit', - # Disabling all distributed tests because I don't have the whole year. 
Each test takes 2 hours - 'distributed/nn/jit/test_instantiator', - 'distributed/rpc/test_faulty_agent', - 'distributed/rpc/test_process_group_agent', - 'distributed/rpc/test_tensorpipe_agent', - # Fails on JURECA-DC logins with SIGSEGV - 'distributed/test_c10d_spawn', - # Fails on JURECA-DC - 'test_unary_ufuncs', - # This test fails everywhere: https://github.com/pytorch/pytorch/issues/41242 - # 'test_cpp_extensions_jit', - # Throws illegal memory access due to float16: https://github.com/pytorch/pytorch/issues/41340 - # 'test_torch', - # Potentially problematic save/load issue with test_lstm on only some machines. Tell users to verify save&load! - # https://github.com/pytorch/pytorch/issues/43209 - # 'test_quantization', - ], -} - -runtest = 'cd test && %(python)s run_test.py --verbose %(excluded_tests)s' - -sanity_check_commands = ["python -c 'import caffe2.python'"] -tests = ['PyTorch-check-cpp-extension.py'] - -moduleclass = 'devel' diff --git a/Golden_Repo/p/PyTorch/PyTorch-1.7.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/p/PyTorch/PyTorch-1.7.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 865f39655e9398c84edee37ca14e3b146642ac9d..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyTorch/PyTorch-1.7.0-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,287 +0,0 @@ -name = 'PyTorch' -version = '1.7.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pytorch.org/' -description = """Tensors and Dynamic neural networks in Python with strong GPU acceleration. -PyTorch is a deep learning framework that puts Python first.""" - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -site_contacts = 'Alexandre Strube <a.strube@fz-juelich.de>' - -osdependencies = [OS_PKG_IBVERBS_DEV] -local_cudaver = '11.0' - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Ninja', '1.10.0'), - ('Python', '3.8.5'), - ('protobuf', '3.13.0'), - ('pybind11', '2.5.0', versionsuffix), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('MPFR', '4.1.0'), - ('GMP', '6.2.0'), - ('numactl', '2.0.13', '', SYSTEM), - ('FFmpeg', '4.3.1'), - ('cuDNN', '8.0.2.39', '-CUDA-%s' % local_cudaver, SYSTEM), - ('magma', '2.5.4'), - ('NCCL', '2.8.3-1', '-CUDA-%s' % local_cudaver), - ('LLVM', '10.0.1'), - ('libvpx', '1.9.0'), -] - -# default CUDA compute capabilities to use (override via --cuda-compute-capabilities) -cuda_compute_capabilities = ['7.0', '7.5', '8.0'] - -# PyTorch pulls in a bunch of submodules that are not releases. -# We download the submodule revisions from their repos. 
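For reference, a rough sketch (not part of the deleted easyconfigs) of why local_extract_cmd_pattern doubles its percent signs: substitution happens in two stages, so the easyconfig fills in the third_party subdirectory while %(version)s and the source-file %s survive for EasyBuild to resolve later. The version and file path below are hypothetical and resolved by hand.

local_extract_cmd_pattern = 'tar -C pytorch-%%(version)s/third_party/%s --strip-components=1 -xf %%s'

# Stage 1, in the easyconfig: pick the submodule directory ('%%' collapses to '%').
extract_cmd = local_extract_cmd_pattern % 'FP16'
# -> 'tar -C pytorch-%(version)s/third_party/FP16 --strip-components=1 -xf %s'

# Stage 2, normally done by EasyBuild when fetching the source; emulated here by hand.
resolved = extract_cmd.replace('%(version)s', '1.7.0') % '/tmp/sources/FP16-20200514.tar.gz'
print(resolved)
# -> tar -C pytorch-1.7.0/third_party/FP16 --strip-components=1 -xf /tmp/sources/FP16-20200514.tar.gz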
-# The list is generated by easybuild-framework/easybuild/scripts/createSubmoduleDeps.sh -local_extract_cmd_pattern = 'tar -C pytorch-%%(version)s/third_party/%s --strip-components=1 -xf %%s' - -source_urls = ['https://github.com/pytorch/pytorch/archive'] - -sources = [ - 'v%(version)s.tar.gz', # PyTorch - { - 'source_urls': ['https://github.com/Maratyszcza/FP16/archive'], - 'download_filename': '4dfe081cf6bcd15db339cf2680b9281b8451eeb3.tar.gz', - 'filename': 'FP16-20200514.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'FP16', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/FXdiv/archive'], - 'download_filename': 'b408327ac2a15ec3e43352421954f5b1967701d1.tar.gz', - 'filename': 'FXdiv-20200417.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'FXdiv', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/NNPACK/archive'], - 'download_filename': '24b55303f5cf65d75844714513a0d1b1409809bd.tar.gz', - 'filename': 'NNPACK-20191007.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'NNPACK', - }, - { - 'source_urls': ['https://github.com/pytorch/QNNPACK/archive'], - 'download_filename': '7d2a4e9931a82adc3814275b6219a03e24e36b4c.tar.gz', - 'filename': 'QNNPACK-20190828.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'QNNPACK', - }, - { - 'source_urls': ['https://github.com/google/XNNPACK/archive'], - 'download_filename': '1b354636b5942826547055252f3b359b54acff95.tar.gz', - 'filename': 'XNNPACK-20200323.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'XNNPACK', - }, - { - 'source_urls': ['https://github.com/google/benchmark/archive'], - 'download_filename': '505be96ab23056580a3a2315abba048f4428b04e.tar.gz', - 'filename': 'benchmark-20180606.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'benchmark', - }, - { - 'source_urls': ['https://github.com/pytorch/cpuinfo/archive'], - 'download_filename': '63b254577ed77a8004a9be6ac707f3dccc4e1fd9.tar.gz', - 'filename': 'cpuinfo-20200611.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'cpuinfo', - }, - { - 'source_urls': ['https://github.com/NVlabs/cub/archive'], - 'download_filename': 'd106ddb991a56c3df1b6d51b2409e36ba8181ce4.tar.gz', - 'filename': 'cub-20200512.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'cub', - }, - { - 'source_urls': ['https://github.com/eigenteam/eigen-git-mirror/archive'], - 'download_filename': 'd41dc4dd74acce21fb210e7625d5d135751fa9e5.tar.gz', - 'filename': 'eigen-20190125.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'eigen', - }, - { - 'source_urls': ['https://github.com/pytorch/fbgemm/archive'], - 'download_filename': '1d710393d5b7588f5de3b83f51c22bbddf095229.tar.gz', - 'filename': 'fbgemm-20200914.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm', - }, - { - 'source_urls': ['https://github.com/asmjit/asmjit/archive'], - 'download_filename': '9057aa30b620f0662ff51e2230c126a345063064.tar.gz', - 'filename': 'asmjit-20200429.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm/third_party/asmjit', - }, - { - 'source_urls': ['https://github.com/pytorch/cpuinfo/archive'], - 'download_filename': 'd5e37adf1406cf899d7d9ec1d317c47506ccb970.tar.gz', - 'filename': 'cpuinfo-20190201.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm/third_party/cpuinfo', - }, - { - 'source_urls': ['https://github.com/google/googletest/archive'], - 'download_filename': '0fc5466dbb9e623029b1ada539717d10bd45e99e.tar.gz', - 'filename': 'googletest-20180925.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fbgemm/third_party/googletest', - }, - { - 
'source_urls': ['https://github.com/fmtlib/fmt/archive'], - 'download_filename': 'cd4af11efc9c622896a3e4cb599fa28668ca3d05.tar.gz', - 'filename': 'fmt-20200806.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'fmt', - }, - { - 'source_urls': ['https://github.com/houseroad/foxi/archive'], - 'download_filename': '4aba696ec8f31794fd42880346dc586486205e0a.tar.gz', - 'filename': 'foxi-20200922.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'foxi', - }, - { - 'source_urls': ['https://github.com/google/gemmlowp/archive'], - 'download_filename': '3fb5c176c17c765a3492cd2f0321b0dab712f350.tar.gz', - 'filename': 'gemmlowp-20181126.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'gemmlowp/gemmlowp', - }, - { - 'source_urls': ['https://github.com/facebookincubator/gloo/archive'], - 'download_filename': '3dc0328fe6a9d47bd47c0c6ca145a0d8a21845c6.tar.gz', - 'filename': 'gloo-20200918.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'gloo', - }, - { - 'source_urls': ['https://github.com/google/googletest/archive'], - 'download_filename': '2fe3bd994b3189899d93f1d5a881e725e046fdc2.tar.gz', - 'filename': 'googletest-20180831.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'googletest', - }, - { - 'source_urls': ['https://github.com/intel/ideep/archive'], - 'download_filename': 'ba885200dbbc1f144c7b58eba487378eb324f281.tar.gz', - 'filename': 'ideep-20200915.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'ideep', - }, - { - 'source_urls': ['https://github.com/intel/mkl-dnn/archive'], - 'download_filename': '5ef631a030a6f73131c77892041042805a06064f.tar.gz', - 'filename': 'mkl-dnn-20200909.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'ideep/mkl-dnn', - }, - { - 'source_urls': ['https://github.com/onnx/onnx/archive'], - 'download_filename': 'a82c6a7010e2e332d8f74ad5b0c726fd47c85376.tar.gz', - 'filename': 'onnx-20200531.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'onnx', - }, - { - 'source_urls': ['https://github.com/google/benchmark/archive'], - 'download_filename': 'e776aa0275e293707b6a0901e0e8d8a8a3679508.tar.gz', - 'filename': 'benchmark-20180525.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'onnx/third_party/benchmark', - }, - { - 'source_urls': ['https://github.com/onnx/onnx-tensorrt/archive'], - 'download_filename': 'c153211418a7c57ce071d9ce2a41f8d1c85a878f.tar.gz', - 'filename': 'onnx-tensorrt-20190916.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'onnx-tensorrt', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/psimd/archive'], - 'download_filename': '072586a71b55b7f8c584153d223e95687148a900.tar.gz', - 'filename': 'psimd-20200517.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'psimd', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/pthreadpool/archive'], - 'download_filename': '029c88620802e1361ccf41d1970bd5b07fd6b7bb.tar.gz', - 'filename': 'pthreadpool-20200615.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'pthreadpool', - }, - { - 'source_urls': ['https://github.com/Maratyszcza/PeachPy/archive'], - 'download_filename': '07d8fde8ac45d7705129475c0f94ed8925b93473.tar.gz', - 'filename': 'PeachPy-20180218.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'python-peachpy', - }, - { - 'source_urls': ['https://github.com/shibatch/sleef/archive'], - 'download_filename': '7f523de651585fe25cade462efccca647dcc8d02.tar.gz', - 'filename': 'sleef-20190730.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'sleef', - }, - { - 'source_urls': ['https://github.com/01org/tbb/archive'], - 'download_filename': 
'a51a90bc609bb73db8ea13841b5cf7aa4344d4a9.tar.gz', - 'filename': 'tbb-20181009.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tbb', - }, - { - 'source_urls': ['https://github.com/pytorch/tensorpipe/archive'], - 'download_filename': '95ff9319161fcdb3c674d2bb63fac3e94095b343.tar.gz', - 'filename': 'tensorpipe-20200928.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe', - }, - { - 'source_urls': ['https://github.com/google/googletest/archive'], - 'download_filename': '2fe3bd994b3189899d93f1d5a881e725e046fdc2.tar.gz', - 'filename': 'googletest-20180831.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe/third_party/googletest', - }, - { - 'source_urls': ['https://github.com/google/libnop/archive'], - 'download_filename': 'aa95422ea8c409e3f078d2ee7708a5f59a8b9fa2.tar.gz', - 'filename': 'libnop-20200723.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe/third_party/libnop', - }, - { - 'source_urls': ['https://github.com/libuv/libuv/archive'], - 'download_filename': '02a9e1be252b623ee032a3137c0b0c94afbe6809.tar.gz', - 'filename': 'libuv-20200419.tar.gz', - 'extract_cmd': local_extract_cmd_pattern % 'tensorpipe/third_party/libuv', - }, -] -patches = [ - 'PyTorch-1.6.0_fix-test-dataloader-fixed-affinity.patch', - 'PyTorch-1.7.0_fix_test_DistributedDataParallel.patch', - 'PyTorch-1.7.0_fix_typing_python38.patch', - 'PyTorch-1.7.0_fix_remove_backports.patch', -] - -excluded_tests = { - '': [ - # Fails on HDFML. Probably needs a GPU and a network - needs NCCL - 'distributed/test_distributed_fork', - # Fails on HDFML. - 'distributed/test_distributed_spawn', - # Fails on JUWELS (cluster) with SIGXCPU and on JUWELSBOOSTER - 'test_foreach', - 'test_xnnpack_integration', - # Fails on JUSUF - 'distributed/rpc/test_process_group_agent', - 'distributed/rpc/test_tensorpipe_agent', - 'test_autograd', - 'test_jit', - # Disabling all distributed tests because I don't have the whole year. Each test takes 2 hours - 'distributed/nn/jit/test_instantiator', - 'distributed/rpc/test_faulty_agent', - 'distributed/rpc/test_process_group_agent', - 'distributed/rpc/test_tensorpipe_agent', - # Fails on JURECA-DC logins with SIGSEGV - 'distributed/test_c10d_spawn', - # Fails on JURECA-DC - 'test_unary_ufuncs', - # This test fails everywhere: https://github.com/pytorch/pytorch/issues/41242 - # 'test_cpp_extensions_jit', - # Throws illegal memory access due to float16: https://github.com/pytorch/pytorch/issues/41340 - # 'test_torch', - # Potentially problematic save/load issue with test_lstm on only some machines. Tell users to verify save&load! 
- # https://github.com/pytorch/pytorch/issues/43209 - # 'test_quantization', - ], -} - -runtest = 'cd test && %(python)s run_test.py --verbose %(excluded_tests)s' - -sanity_check_commands = ["python -c 'import caffe2.python'"] -tests = ['PyTorch-check-cpp-extension.py'] - -moduleclass = 'devel' diff --git a/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_remove_backports.patch b/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_remove_backports.patch deleted file mode 100644 index 8449d476f473e8b86edb86591f8512433a8f5482..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_remove_backports.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup.py.orig 2020-11-12 14:13:54.727306249 +0100 -+++ setup.py 2020-11-12 14:14:42.105485851 +0100 -@@ -337,7 +337,7 @@ - ################################################################################ - - # the list of runtime dependencies required by this built package --install_requires = ['future', 'typing_extensions', 'dataclasses'] -+install_requires = ['future'] - - missing_pydep = ''' - Missing build dependency: Unable to `import {importname}`. diff --git a/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_test_DistributedDataParallel.patch b/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_test_DistributedDataParallel.patch deleted file mode 100644 index 18c874c475b985602f88a4113851df806107edce..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_test_DistributedDataParallel.patch +++ /dev/null @@ -1,40 +0,0 @@ -From 4ee880e1645da7c581a04de6cfe30d911f659f57 Mon Sep 17 00:00:00 2001 -From: Alexander Grund <alexander.grund@tu-dresden.de> -Date: Thu, 15 Oct 2020 14:30:03 +0200 -Subject: [PATCH] Distribute GPUs in round robin mode for distributed_test - -The ProcessGroupNCCL::barrier implementation assumes that when -1 GPU/rank is used the GPU-Index equals the rank. Due to NCCL -communicator reuse this then leads to rank 0 using the (kinda) -temporary communicator while the other processes might use other GPUs -leading to them trying to create a new communicator and waiting for -rank 0 until that creates a new (potentially unrelated) one. 
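For reference, a minimal sketch (not part of the patch itself) of the round-robin rank-to-GPU mapping this fix introduces, shown in the hunk that follows: each rank's first GPU index equals its rank, then strides by the world size. The GPU and rank counts below are hypothetical.

nGPUs, world_size = 4, 2          # hypothetical node: 4 GPUs, 2 ranks
nGPUs_per_process = nGPUs // world_size
rank_to_GPU = {
    i: [i + gpu_num * world_size for gpu_num in range(nGPUs_per_process)]
    for i in range(world_size)
}
print(rank_to_GPU)  # -> {0: [0, 2], 1: [1, 3]}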
- -See #46248 for details ---- - torch/testing/_internal/distributed/distributed_test.py | 6 ++---- - 1 file changed, 2 insertions(+), 4 deletions(-) - -diff --git a/torch/testing/_internal/distributed/distributed_test.py b/torch/testing/_internal/distributed/distributed_test.py -index ccaccbda529aa..4d7c23b613d7c 100644 ---- a/torch/testing/_internal/distributed/distributed_test.py -+++ b/torch/testing/_internal/distributed/distributed_test.py -@@ -362,16 +362,14 @@ def _init_multigpu_helper(self): - """ - nGPUs = torch.cuda.device_count() - world_size = dist.get_world_size() -- visible_devices = range(nGPUs) - - if BACKEND == "nccl": - apply_hack_for_nccl() - - nGPUs_per_process = nGPUs // world_size - rank_to_GPU = { -- i: list( -- visible_devices[i * nGPUs_per_process: (i + 1) * nGPUs_per_process] -- ) -+ # Each rank has to get the GPU with the index equal to its rank -+ i: [i + gpu_num * world_size for gpu_num in range(nGPUs_per_process)] - for i in range(world_size) - } - return rank_to_GPU \ No newline at end of file diff --git a/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_typing_python38.patch b/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_typing_python38.patch deleted file mode 100644 index da556eaef46876c75aae77b2f50a0dc5d55e592e..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyTorch/PyTorch-1.7.0_fix_typing_python38.patch +++ /dev/null @@ -1,19 +0,0 @@ ---- test/jit/test_recursive_script.py.orig 2020-11-11 18:40:22.959601697 +0100 -+++ test/jit/test_recursive_script.py 2020-11-11 18:40:44.718092798 +0100 -@@ -2,7 +2,6 @@ - import os - import sys - import typing --import typing_extensions - from typing import List, Dict, Optional, Tuple - - import torch -@@ -174,7 +173,7 @@ - self.checkModule(M1(), (torch.randn(2, 2),)) - - class M2(torch.nn.Module): -- x : typing_extensions.Final[int] -+ x : typing.Final[int] - - def __init__(self): - super().__init__() diff --git a/Golden_Repo/p/PyTorch/PyTorch-check-cpp-extension.py b/Golden_Repo/p/PyTorch/PyTorch-check-cpp-extension.py deleted file mode 100755 index 0a8f6d3e6c59ed07b073470e1a7db54548099e37..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/PyTorch/PyTorch-check-cpp-extension.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python - -# Verify that PyTorch can JIT compile C++ extensions -# This requires at least Ninja and a working C++ compiler, preferably GCC -# -# Heavily based on the PyTorch tutorial for C++ extensions -# Author: Alexander Grund (TU Dresden) - -from torch.utils.cpp_extension import load_inline - -cpp_source = "torch::Tensor test_func(torch::Tensor x) { return x; }" - -module = load_inline(name='inline_extension', - cpp_sources=cpp_source, - functions=['test_func']) -assert module diff --git a/Golden_Repo/p/Python-Neuroimaging/Python-Neuroimaging-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/p/Python-Neuroimaging/Python-Neuroimaging-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 622f5b9e89d37b866ba17498741115df1473198c..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Python-Neuroimaging/Python-Neuroimaging-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,97 +0,0 @@ -easyblock = 'Bundle' - -name = 'Python-Neuroimaging' -version = '2020' -versionsuffix = '-Python-%(pyver)s' - -homepage = '' -description = """Python Neuroimaging is a collection of open source software for neuroimaging using Python.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} 
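For reference, a rough usage sketch (not part of the deleted files) of the load_inline check performed by PyTorch-check-cpp-extension.py above: the C++ source is JIT-compiled at run time, which is why Ninja and a working C++ compiler are required. Assumes torch is importable; the input tensor is a hypothetical example.

import torch
from torch.utils.cpp_extension import load_inline

# Same one-line extension as the check script: an identity function on tensors.
cpp_source = "torch::Tensor test_func(torch::Tensor x) { return x; }"
module = load_inline(name='inline_extension_demo',
                     cpp_sources=cpp_source,
                     functions=['test_func'])

x = torch.arange(4.0)                       # hypothetical input
assert torch.equal(module.test_func(x), x)  # the extension just returns its input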
-toolchainopts = {'pic': True} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', '-Python-%(pyver)s'), - ('scikit', '2020', '-Python-%(pyver)s'), - ('h5py', '2.10.0', '-serial-Python-%(pyver)s'), - ('PyOpenCL', '2020.2.2', '-Python-%(pyver)s'), -] - -# Needed to make sure that the sanity check of mdt works -unwanted_env_vars = ['CUDA_VISIBLE_DEVICES'] - -exts_list = [ - ('bz2file', '0.98', { - 'source_urls': ['https://pypi.python.org/packages/source/b/bz2file'], - }), - ('nibabel', '3.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/n/nibabel'], - }), - ('dipy', '1.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/d/dipy'], - }), - ('nilearn', '0.6.2', { - 'source_urls': ['https://pypi.python.org/packages/source/n/nilearn'], - 'patches': ['nilearn_sklearn.patch'] - }), - ('neurdflib', '5.0.1', { - 'source_urls': ['https://pypi.python.org/packages/source/n/neurdflib'], - 'modulename': 'rdflib' - }), - ('etelemetry', '0.2.2', { - 'source_urls': ['https://pypi.python.org/packages/source/e/etelemetry'], - 'source_tmpl': '%(name)s-%(version)s-py3-none-any.whl', - 'unpack_sources': False, - 'use_pip': True, - }), - ('filelock', '3.0.12', { - 'source_urls': ['https://pypi.python.org/packages/source/f/filelock'], - }), - ('ci-info', '0.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/c/ci_info'], - 'modulename': 'ci_info' - }), - ('nipype', '1.5.1', { - 'source_urls': ['https://pypi.python.org/packages/source/n/nipype'], - }), - ('nipy', '0.4.2', { - 'source_urls': ['https://pypi.python.org/packages/source/n/nipy'], - 'patches': ['nipy_numpy_decorators.patch'] - }), - ('mne', '0.21.0', { - 'source_urls': ['https://pypi.python.org/packages/source/m/mne'], - }), - # 0.3.1 and higher import indent from textwrap, which is a Python 3 feature and therefore it doesn't work with py2 - ('mot', '0.11.3', { - 'source_urls': ['https://pypi.python.org/packages/source/m/mot'], - # to disable the test, since it needs a working OpenCL installation, which is not always the case in the logins - 'modulename': 'os' - }), - # 0.9.38 and higher import indent from textwrap, which is a Python 3 feature and therefore it doesn't work with py2 - ('mdt', '1.2.6', { - 'source_urls': ['https://pypi.python.org/packages/source/m/mdt'], - # to disable the test, since it needs a working OpenCL installation, which is not always the case in the logins - 'modulename': 'os' - }), - ('pysptools', '0.15.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pysptools'], - 'modulename': 'pysptools.util' - }), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/Python-Neuroimaging/nilearn_sklearn.patch b/Golden_Repo/p/Python-Neuroimaging/nilearn_sklearn.patch deleted file mode 100644 index fec563f63172fbefce5593d352251975227820b5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Python-Neuroimaging/nilearn_sklearn.patch +++ /dev/null @@ -1,11 +0,0 @@ -diff -ruN nilearn-0.6.2.orig/setup.py nilearn-0.6.2/setup.py ---- nilearn-0.6.2.orig/setup.py 2020-02-24 20:24:10.000000000 +0100 -+++ nilearn-0.6.2/setup.py 2020-10-20 15:04:18.921686291 +0200 -@@ -40,7 +40,6 @@ - if package.startswith('sklearn'): - package = 
package.replace('sklearn', 'scikit-learn') - required_packages.append(package) -- required_packages.append('sklearn') - return required_packages - - diff --git a/Golden_Repo/p/Python-Neuroimaging/nipy_numpy_decorators.patch b/Golden_Repo/p/Python-Neuroimaging/nipy_numpy_decorators.patch deleted file mode 100644 index d758da622b45be7e76efedc66e07b7aa0aeff374..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Python-Neuroimaging/nipy_numpy_decorators.patch +++ /dev/null @@ -1,372 +0,0 @@ -diff -ruN nipy-0.4.2.orig/nipy/algorithms/clustering/tests/test_vmm.py.orig nipy-0.4.2/nipy/algorithms/clustering/tests/test_vmm.py.orig ---- nipy-0.4.2.orig/nipy/algorithms/clustering/tests/test_vmm.py.orig 1970-01-01 01:00:00.000000000 +0100 -+++ nipy-0.4.2/nipy/algorithms/clustering/tests/test_vmm.py.orig 2020-10-20 14:36:04.203516000 +0200 -@@ -0,0 +1,77 @@ -+""" -+Test the Von-Mises-Fisher mixture model -+ -+Author : Bertrand Thirion, 2010 -+""" -+from __future__ import absolute_import -+ -+import numpy as np -+ -+from ..von_mises_fisher_mixture import (VonMisesMixture, -+ sphere_density, -+ select_vmm, -+ select_vmm_cv) -+ -+ -+from nose.tools import assert_true, assert_equal -+ -+ -+def test_spherical_area(): -+ # test the co_labelling functionality -+ points, area = sphere_density(100) -+ assert_true(np.abs(area.sum()-4*np.pi)<1.e-2) -+ -+ -+def test_von_mises_fisher_density(): -+ # test that a density is indeed computed on the unit sphere for a -+ # one-component and three-component model (k == 1, 3) -+ x = np.random.randn(100, 3) -+ x = (x.T/np.sqrt(np.sum(x**2, 1))).T -+ s, area = sphere_density(100) -+ for k in (1, 3): -+ for precision in [.1, 1., 10., 100.]: -+ for null_class in (False, True): -+ vmd = VonMisesMixture(k, precision, null_class=null_class) -+ vmd.estimate(x) -+ # check that it sums to 1 -+ assert_true(np.abs((vmd.mixture_density(s)*area).sum() - 1) -+ < 1e-2) -+ -+ -+def test_dimension_selection_bic(): -+ # Tests whether dimension selection yields correct results -+ x1 = [0.6, 0.48, 0.64] -+ x2 = [-0.8, 0.48, 0.36] -+ x3 = [0.48, 0.64, -0.6] -+ x = np.random.randn(200, 3) * .1 -+ x[:40] += x1 -+ x[40:150] += x2 -+ x[150:] += x3 -+ x = (x.T / np.sqrt(np.sum(x**2, 1))).T -+ -+ precision = 100. -+ my_vmm = select_vmm(list(range(1,8)), precision, False, x) -+ assert_equal(my_vmm.k, 3) -+ -+ -+def test_dimension_selection_cv(): -+ # Tests the dimension selection using cross validation -+ x1 = [1, 0, 0] -+ x2 = [-1, 0, 0] -+ x = np.random.randn(20, 3)*.1 -+ x[0::2] += x1 -+ x[1::2] += x2 -+ x = (x.T / np.sqrt(np.sum(x**2,1))).T -+ -+ precision = 50. 
-+ sub = np.repeat(np.arange(10), 2) -+ my_vmm = select_vmm_cv(list(range(1,8)), precision, x, cv_index=sub, -+ null_class=False, ninit=5) -+ z = np.argmax(my_vmm.responsibilities(x), 1) -+ assert_true(len(np.unique(z))>1) -+ assert_true(len(np.unique(z))<4) -+ -+ -+if __name__ == '__main__': -+ import nose -+ nose.run(argv=['', __file__]) -diff -ruN nipy-0.4.2.orig/nipy/algorithms/clustering/tests/test_vmm.py.rej nipy-0.4.2/nipy/algorithms/clustering/tests/test_vmm.py.rej ---- nipy-0.4.2.orig/nipy/algorithms/clustering/tests/test_vmm.py.rej 1970-01-01 01:00:00.000000000 +0100 -+++ nipy-0.4.2/nipy/algorithms/clustering/tests/test_vmm.py.rej 2020-10-20 14:36:04.203570000 +0200 -@@ -0,0 +1,15 @@ -+--- nipy/algorithms/clustering/tests/test_vmm.py -++++ nipy/algorithms/clustering/tests/test_vmm.py -+@@ -13,7 +13,11 @@ -+ select_vmm_cv) -+ -+ from nose.tools import assert_true, assert_equal -+-from numpy.testing import decorators -++try: -++ from numpy.testing import decorators -++except ImportError: -++ from numpy.testing import dec -++ decorators = dec -+ -+ from nibabel.optpkg import optional_package -+ -diff -ruN nipy-0.4.2.orig/nipy/algorithms/diagnostics/tests/test_screen.py nipy-0.4.2/nipy/algorithms/diagnostics/tests/test_screen.py ---- nipy-0.4.2.orig/nipy/algorithms/diagnostics/tests/test_screen.py 2018-01-13 20:37:07.000000000 +0100 -+++ nipy-0.4.2/nipy/algorithms/diagnostics/tests/test_screen.py 2020-10-20 14:36:04.205595000 +0200 -@@ -23,7 +23,13 @@ - from nose.tools import (assert_true, assert_false, assert_equal, assert_raises) - - from numpy.testing import (assert_array_equal, assert_array_almost_equal, -- assert_almost_equal, decorators) -+ assert_almost_equal) -+ -+try: -+ from numpy.testing import decorators -+except ImportError: -+ from numpy.testing import dec -+ decorators = dec - - from nipy.testing import funcfile - from nipy.testing.decorators import needs_mpl_agg -diff -ruN nipy-0.4.2.orig/nipy/algorithms/statistics/formula/formulae.py nipy-0.4.2/nipy/algorithms/statistics/formula/formulae.py ---- nipy-0.4.2.orig/nipy/algorithms/statistics/formula/formulae.py 2018-02-17 12:53:17.000000000 +0100 -+++ nipy-0.4.2/nipy/algorithms/statistics/formula/formulae.py 2020-10-20 14:36:04.206408696 +0200 -@@ -267,10 +267,10 @@ - return sympy.Symbol.__mul__(self, other) - - --class Beta(sympy.symbol.Dummy): -+class Beta(sympy.Dummy): - ''' A symbol tied to a Term `term` ''' - def __new__(cls, name, term): -- new = sympy.symbol.Dummy.__new__(cls, name) -+ new = sympy.Dummy.__new__(cls, name) - new._term = term - return new - -diff -ruN nipy-0.4.2.orig/nipy/algorithms/statistics/rft.py nipy-0.4.2/nipy/algorithms/statistics/rft.py ---- nipy-0.4.2.orig/nipy/algorithms/statistics/rft.py 2018-02-17 12:53:17.000000000 +0100 -+++ nipy-0.4.2/nipy/algorithms/statistics/rft.py 2020-10-20 14:36:04.207667603 +0200 -@@ -20,7 +20,10 @@ - from numpy.linalg import pinv - - from scipy import stats --from scipy.misc import factorial -+try: -+ from scipy.misc import factorial -+except ImportError: -+ from scipy.special import factorial - from scipy.special import gamma, gammaln, beta, hermitenorm - - # Legacy repr printing from numpy. 
-diff -ruN nipy-0.4.2.orig/nipy/algorithms/statistics/tests/test_rft.py nipy-0.4.2/nipy/algorithms/statistics/tests/test_rft.py ---- nipy-0.4.2.orig/nipy/algorithms/statistics/tests/test_rft.py 2018-02-17 12:53:17.000000000 +0100 -+++ nipy-0.4.2/nipy/algorithms/statistics/tests/test_rft.py 2020-10-20 14:36:04.209578233 +0200 -@@ -6,7 +6,10 @@ - - from scipy.special import gammaln, hermitenorm - import scipy.stats --from scipy.misc import factorial -+try: -+ from scipy.misc import factorial -+except ImportError: -+ from scipy.special import factorial - - from .. import rft - -diff -ruN nipy-0.4.2.orig/nipy/fixes/numpy/testing/nosetester.py nipy-0.4.2/nipy/fixes/numpy/testing/nosetester.py ---- nipy-0.4.2.orig/nipy/fixes/numpy/testing/nosetester.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/fixes/numpy/testing/nosetester.py 2020-10-20 14:36:04.211233186 +0200 -@@ -21,7 +21,7 @@ - - Examples - -------- -- >>> np.testing.nosetester.get_package_name('nonsense') -+ >>> get_package_name('nonsense') - 'numpy' - - """ -diff -ruN nipy-0.4.2.orig/nipy/info.py nipy-0.4.2/nipy/info.py ---- nipy-0.4.2.orig/nipy/info.py 2018-02-19 13:40:18.000000000 +0100 -+++ nipy-0.4.2/nipy/info.py 2020-10-20 14:36:04.212645000 +0200 -@@ -170,7 +170,7 @@ - MICRO = _version_micro - ISRELEASE = _version_extra == '' - VERSION = __version__ --REQUIRES = ["numpy", "scipy", "sympy", "nibabel"] -+REQUIRES = ["numpy", "scipy", "sympy(<1.6)", "nibabel"] - STATUS = 'beta' - - # Versions and locations of optional data packages -diff -ruN nipy-0.4.2.orig/nipy/labs/group/permutation_test.py nipy-0.4.2/nipy/labs/group/permutation_test.py ---- nipy-0.4.2.orig/nipy/labs/group/permutation_test.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/labs/group/permutation_test.py 2020-10-20 14:36:04.214327640 +0200 -@@ -6,7 +6,10 @@ - - # Third-party imports - import numpy as np --import scipy.misc as sm -+try: -+ from scipy.misc import comb -+except ImportError: -+ from scipy.special import comb - import warnings - - # Our own imports -@@ -374,7 +377,7 @@ - elif self.nsamples == 2: - n1,p = self.data1.shape[self.axis], self.data1.shape[1-self.axis] - n2 = self.data2.shape[self.axis] -- max_nperms = sm.comb(n1+n2,n1,exact=1) -+ max_nperms = comb(n1+n2,n1,exact=1) - data = np.concatenate((self.data1,self.data2), self.axis) - if self.vardata1 is not None: - vardata = np.concatenate((self.vardata1,self.vardata2), self.axis) -diff -ruN nipy-0.4.2.orig/nipy/labs/viz_tools/test/test_activation_maps.py nipy-0.4.2/nipy/labs/viz_tools/test/test_activation_maps.py ---- nipy-0.4.2.orig/nipy/labs/viz_tools/test/test_activation_maps.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/labs/viz_tools/test/test_activation_maps.py 2020-10-20 14:36:04.215870000 +0200 -@@ -10,7 +10,7 @@ - try: - import matplotlib as mp - # Make really sure that we don't try to open an Xserver connection. 
-- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - except ImportError: -@@ -28,7 +28,7 @@ - - def test_demo_plot_map(): - # This is only a smoke test -- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - demo_plot_map() -@@ -38,7 +38,7 @@ - - def test_plot_anat(): - # This is only a smoke test -- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - data = np.zeros((20, 20, 20)) -@@ -85,7 +85,7 @@ - # Test that things don't crash when we give a map with nothing above - # threshold - # This is only a smoke test -- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - data = np.zeros((20, 20, 20)) -diff -ruN nipy-0.4.2.orig/nipy/labs/viz_tools/test/test_cm.py nipy-0.4.2/nipy/labs/viz_tools/test/test_cm.py ---- nipy-0.4.2.orig/nipy/labs/viz_tools/test/test_cm.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/labs/viz_tools/test/test_cm.py 2020-10-20 14:36:04.217044000 +0200 -@@ -8,7 +8,7 @@ - try: - import matplotlib as mp - # Make really sure that we don't try to open an Xserver connection. -- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - except ImportError: -@@ -19,14 +19,14 @@ - - def test_dim_cmap(): - # This is only a smoke test -- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - dim_cmap(pl.cm.jet) - - - def test_replace_inside(): - # This is only a smoke test -- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8) -diff -ruN nipy-0.4.2.orig/nipy/labs/viz_tools/test/test_slicers.py nipy-0.4.2/nipy/labs/viz_tools/test/test_slicers.py ---- nipy-0.4.2.orig/nipy/labs/viz_tools/test/test_slicers.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/labs/viz_tools/test/test_slicers.py 2020-10-20 14:36:04.218549000 +0200 -@@ -7,7 +7,7 @@ - try: - import matplotlib as mp - # Make really sure that we don't try to open an Xserver connection. 
-- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - except ImportError: -@@ -25,7 +25,7 @@ - # conditioned on presence of MNI templated - if not find_mni_template(): - raise nose.SkipTest("MNI Template is absent for the smoke test") -- mp.use('svg', warn=False) -+ mp.use('svg') - import pylab as pl - pl.switch_backend('svg') - demo_ortho_slicer() -diff -ruN nipy-0.4.2.orig/nipy/modalities/fmri/tests/test_aliases.py nipy-0.4.2/nipy/modalities/fmri/tests/test_aliases.py ---- nipy-0.4.2.orig/nipy/modalities/fmri/tests/test_aliases.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/modalities/fmri/tests/test_aliases.py 2020-10-20 14:36:04.219628000 +0200 -@@ -44,7 +44,7 @@ - func = sympy.Function('myfunc') - assert_false(hasattr(func, '_imp_')) - f = implemented_function(func, lambda x: 2*x) -- assert_true(hasattr(func, '_imp_')) -+ assert_true(hasattr(f, '_imp_')) - - - def test_lambdify(): -diff -ruN nipy-0.4.2.orig/nipy/testing/decorators.py nipy-0.4.2/nipy/testing/decorators.py ---- nipy-0.4.2.orig/nipy/testing/decorators.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/testing/decorators.py 2020-10-20 14:36:04.222327000 +0200 -@@ -8,7 +8,10 @@ - from __future__ import print_function - from __future__ import absolute_import - --from numpy.testing.decorators import * -+try: -+ from numpy.testing.decorators import * -+except ImportError: -+ from numpy.testing._private.decorators import * - - from nipy.utils import templates, example_data, DataError - -@@ -124,7 +127,7 @@ - import matplotlib.pyplot as plt - from nose.tools import make_decorator - def agg_func(*args, **kwargs): -- matplotlib.use('agg', warn=False) -+ matplotlib.use('agg') - plt.switch_backend('agg') - return func(*args, **kwargs) - return make_decorator(func)(agg_func) -diff -ruN nipy-0.4.2.orig/nipy/testing/__init__.py nipy-0.4.2/nipy/testing/__init__.py ---- nipy-0.4.2.orig/nipy/testing/__init__.py 2018-02-17 12:53:17.000000000 +0100 -+++ nipy-0.4.2/nipy/testing/__init__.py 2020-10-20 14:36:04.220882000 +0200 -@@ -36,6 +36,13 @@ - anatfile = os.path.join(basedir, 'anatomical.nii.gz') - - from numpy.testing import * -+# Re import decorators/dec depending on numpy's version -+try: -+ from numpy.testing import decorators -+except ImportError: -+ from numpy.testing import dec -+ decorators = dec -+ - # Overwrites numpy.testing.Tester - from .nosetester import NipyNoseTester as Tester - test = Tester().test -diff -ruN nipy-0.4.2.orig/nipy/tests/test_scripts.py nipy-0.4.2/nipy/tests/test_scripts.py ---- nipy-0.4.2.orig/nipy/tests/test_scripts.py 2018-01-13 20:37:08.000000000 +0100 -+++ nipy-0.4.2/nipy/tests/test_scripts.py 2020-10-20 14:36:04.223796000 +0200 -@@ -19,7 +19,13 @@ - from nose.tools import assert_true, assert_false, assert_equal, assert_raises - - from ..testing import funcfile --from numpy.testing import decorators, assert_almost_equal -+from numpy.testing import assert_almost_equal -+ -+try: -+ from numpy.testing import decorators -+except ImportError: -+ from numpy.testing import dec -+ decorators = dec - - from nipy.testing.decorators import make_label_dec - diff --git a/Golden_Repo/p/Python/Python-3.8.5-GCCcore-10.3.0.eb b/Golden_Repo/p/Python/Python-3.8.5-GCCcore-10.3.0.eb deleted file mode 100644 index 7a3eca055235efeb3fc388ebe70e1cef724db6f1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Python/Python-3.8.5-GCCcore-10.3.0.eb +++ /dev/null @@ -1,481 +0,0 @@ -name = 'Python' -version = '3.8.5' - -homepage = 'http://python.org/' 
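For reference, a minimal sketch (not part of the patches above) of the try/except fallback idiom that nipy_numpy_decorators.patch applies in several places so the same code works against both old and new numpy/scipy releases; with a current scipy only the fallback branch succeeds.

try:
    from scipy.misc import factorial     # location used by older scipy releases
except ImportError:
    from scipy.special import factorial  # current location

print(factorial(5, exact=True))  # -> 120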
-description = """Python is a programming language that lets you work more quickly and integrate your systems -more effectively.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://www.python.org/ftp/%(namelower)s/%(version)s/'] -sources = [SOURCE_TGZ] - -# python needs bzip2 to build the bz2 package -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('libreadline', '8.0'), - ('ncurses', '6.2'), - ('SQLite', '3.32.3'), - ('Tk', '8.6.10'), - ('GMP', '6.2.0'), - ('XZ', '5.2.5'), - ('libxml2', '2.9.10'), - ('libxslt', '1.1.34'), - ('libffi', '3.3'), - ('libyaml', '0.2.5'), - ('PostgreSQL', '12.3'), - ('protobuf', '3.13.0'), - ('gflags', '2.2.2'), - ('libspatialindex', '1.9.3'), # Needed for rtree - ('libjpeg-turbo', '2.0.5'), -] - -builddependencies = [ - ('binutils', '2.36.1'), - # Needed just for unzipping zip files in systems without unzip - ('unzip', '6.0'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -exts_download_dep_fail = True - -local_grakover = '3.99.9' - -# symlink 'pip' command to 'pip3' that is included with Python installation -# required so we can update pip to version included in extensions, using pip -installopts = " && ln -s %(installdir)s/bin/pip3 %(installdir)s/bin/pip" - -# Updated 28.July.2020 -# order is important! -exts_list = [ - ('six', '1.15.0', { - 'source_urls': ['https://pypi.python.org/packages/source/s/six/'], - }), - ('toml', '0.10.1', { - 'source_urls': ['https://pypi.python.org/packages/source/t/toml/'], - }), - ('setuptools', '49.2.0', { - 'source_tmpl': '%(name)s-%(version)s.zip', - 'source_urls': ['https://pypi.python.org/packages/source/s/setuptools/'], - }), - ('pyparsing', '2.4.7', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pyparsing/'], - }), - ('packaging', '20.4', { - 'source_urls': ['https://pypi.python.org/packages/source/p/packaging'] - }), - ('appdirs', '1.4.4', { - 'source_urls': ['https://pypi.python.org/packages/source/a/appdirs'] - }), - ('certifi', '2020.6.20', { - 'source_urls': ['https://pypi.io/packages/source/c/certifi'], - }), - ('pip', '20.2.3', { - 'use_pip': False, - 'source_urls': ['https://pypi.python.org/packages/source/p/pip/'], - }), - ('nose', '1.3.7', { - 'source_urls': ['https://pypi.python.org/packages/source/n/nose/'], - }), - ('blist', '1.3.6', { - 'source_urls': ['https://pypi.python.org/packages/source/b/blist/'], - }), - ('paycheck', '1.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/paycheck/'], - 'patches': [ - 'paycheck-1.0.2_setup-open-README-utf8.patch', - ], - }), - ('pbr', '5.4.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pbr/'], - }), - ('lockfile', '0.12.2', { - 'source_urls': ['https://pypi.python.org/packages/source/l/lockfile/'], - }), - ('Cython', '0.29.21', { - 'source_urls': ['https://pypi.python.org/packages/source/c/Cython/'], - }), - ('python-dateutil', '2.8.1', { - 'modulename': 'dateutil', - 'source_urls': ['https://pypi.python.org/packages/source/p/python-dateutil/'], - }), - ('decorator', '4.4.2', { - 'source_urls': ['https://pypi.python.org/packages/source/d/decorator/'], - }), - ('liac-arff', '2.4.0', { - 'modulename': 'arff', - 'source_urls': ['https://pypi.python.org/packages/source/l/liac-arff/'], - }), - ('pycrypto', '2.6.1', { - 'modulename': 'Crypto', - 'source_urls': ['http://ftp.dlitz.net/pub/dlitz/crypto/pycrypto/'], - }), - ('ecdsa', '0.15', { - 'source_urls': 
['https://pypi.python.org/packages/source/e/ecdsa/'], - }), - ('pyasn1', '0.4.8', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pyasn1/'], - }), - ('pycparser', '2.20', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pycparser'], - }), - ('cffi', '1.14.1', { - 'source_urls': ['https://pypi.python.org/packages/source/c/cffi'], - }), - ('enum34', '1.1.10', { - 'modulename': 'enum', - 'source_urls': ['https://pypi.python.org/packages/source/e/enum34'], - }), - ('ipaddress', '1.0.23', { - 'source_urls': ['https://pypi.python.org/packages/source/i/ipaddress/'], - }), - ('asn1crypto', '1.3.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/asn1crypto/'], - }), - ('idna', '2.10', { - 'source_urls': ['https://pypi.python.org/packages/source/i/idna/'], - }), - ('cryptography', '3.0', { - 'source_urls': ['https://pypi.python.org/packages/source/c/cryptography/'], - }), - ('PyNaCl', '1.4.0', { - 'modulename': 'nacl', - 'source_urls': ['https://pypi.python.org/packages/source/p/pynacl/'], - }), - ('bcrypt', '3.1.7', { - 'source_urls': ['https://pypi.python.org/packages/source/b/bcrypt/'], - }), - ('paramiko', '2.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/paramiko/'], - }), - ('netifaces', '0.10.9', { - 'source_urls': ['https://pypi.python.org/packages/source/n/netifaces'], - }), - ('netaddr', '0.8.0', { - 'source_urls': ['https://pypi.python.org/packages/source/n/netaddr'], - }), - ('funcsigs', '1.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/f/funcsigs'], - }), - ('mock', '4.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/m/mock'], - }), - ('pytz', '2020.1', { - 'source_tmpl': '%(name)s-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/p/pytz'], - }), - ('bitstring', '3.1.7', { - 'source_tmpl': '%(name)s-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/b/bitstring'], - }), - ('lxml', '4.5.2', { - 'source_urls': ['https://pypi.python.org/packages/source/l/lxml'], - }), - ('XlsxWriter', '1.2.9', { - 'modulename': 'xlsxwriter', - 'source_urls': ['https://pypi.python.org/packages/source/x/xlsxwriter'], - }), - ('Pygments', '2.6.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/Pygments/'], - 'modulename': 'pygments', - }), - ('backports.shutil_get_terminal_size', '1.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/b/backports.shutil_get_terminal_size/'], - }), - ('wcwidth', '0.2.5', { - 'source_urls': ['https://pypi.python.org/packages/source/w/wcwidth/'], - }), - ('prompt_toolkit', '3.0.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/prompt_toolkit/'], - }), - ('PyYAML', '5.3.1', { - 'modulename': 'yaml', - 'source_urls': ['https://pypi.python.org/packages/source/p/PyYAML/'], - }), - ('psycopg2', '2.8.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/psycopg2/'], - }), - ('protobuf', '3.13.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/protobuf/'], - 'modulename': 'google.protobuf', - }), - ('python-gflags', '3.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-gflags/'], - 'modulename': 'gflags', - }), - ('click', '7.1.2', { - 'modulename': 'click', - 'source_urls': ['https://pypi.python.org/packages/source/c/click'], - }), - ('itsdangerous', '1.1.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/itsdangerous'], - }), - ('Werkzeug', '1.0.1', { - 'source_urls': 
['https://pypi.python.org/packages/source/w/werkzeug'], - 'modulename': 'werkzeug' - }), - ('MarkupSafe', '1.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/m/markupsafe'], - 'modulename': 'markupsafe' - }), - ('Jinja2', '2.11.2', { - 'source_urls': ['https://pypi.python.org/packages/source/j/jinja2'], - 'modulename': 'jinja2' - }), - ('Flask', '1.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/f/flask'], - 'modulename': 'flask' - }), - ('Mako', '1.1.3', { - 'source_urls': ['https://pypi.python.org/packages/source/m/mako'], - 'modulename': 'mako' - }), - ('py', '1.9.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/py'], - }), - ('setuptools_scm', '4.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/s/setuptools_scm'], - }), - ('attrs', '19.3.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/attrs'], - 'modulename': 'attr' - }), - ('more-itertools', '8.4.0', { - 'source_urls': ['https://pypi.python.org/packages/source/m/more-itertools'], - 'modulename': 'more_itertools' - }), - ('pluggy', '0.13.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pluggy'], - }), - ('atomicwrites', '1.4.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/atomicwrites'], - }), - ('scandir', '1.10.0', { - 'source_urls': ['https://pypi.python.org/packages/source/s/scandir'], - }), - ('pathlib2', '2.3.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pathlib2'], - }), - ('pytest', '5.4.3', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest'], - }), - ('pytest-runner', '5.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest-runner'], - 'modulename': 'ptr' - }), - ('ply', '3.11', { - 'source_urls': ['https://pypi.python.org/packages/source/p/ply'], - }), - ('ipython_genutils', '0.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/ipython_genutils'], - }), - ('traitlets', '4.3.3', { - 'source_urls': ['https://pypi.python.org/packages/source/t/traitlets'], - }), - ('ptyprocess', '0.6.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/ptyprocess'], - }), - ('pickleshare', '0.7.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pickleshare'], - }), - ('pexpect', '4.8.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pexpect'], - }), - ('simplegeneric', '0.8.1', { - 'source_tmpl': 'simplegeneric-0.8.1.zip', - 'source_urls': ['https://pypi.python.org/packages/source/s/simplegeneric'], - }), - ('parso', '0.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/parso'], - }), - ('jedi', '0.17.2', { - 'source_urls': ['https://pypi.python.org/packages/source/j/jedi'], - }), - ('backcall', '0.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/b/backcall'], - }), - ('ipython', '7.16.1', { - 'source_urls': ['https://pypi.python.org/packages/source/i/ipython'], - 'modulename': 'IPython', - }), - # Prereqs for jupyterhub - ('urllib3', '1.25.10', { - 'source_urls': ['https://pypi.python.org/packages/source/u/urllib3'], - }), - ('chardet', '3.0.4', { - 'source_urls': ['https://pypi.python.org/packages/source/c/chardet'], - }), - ('requests', '2.24.0', { - 'source_urls': ['https://pypi.python.org/packages/source/r/requests'], - }), - ('SQLAlchemy', '1.3.18', { - 'source_urls': ['https://pypi.python.org/packages/source/s/SQLAlchemy'], - 'modulename': 'sqlalchemy', - }), - ('python-editor', '1.0.4', { - 'source_urls': 
['https://pypi.python.org/packages/source/p/python-editor'], - 'modulename': 'editor', - }), - ('alembic', '1.4.2', { - 'source_urls': ['https://pypi.python.org/packages/source/a/alembic'], - }), - ('vcversioner', '2.16.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/v/vcversioner'], - }), - ('pyrsistent', '0.16.0', { - 'source_urls': ['https://pypi.io/packages/source/p/pyrsistent'], - }), - ('jsonschema', '3.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/j/jsonschema'], - }), - ('python-oauth2', '1.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-oauth2'], - 'modulename': 'oauth2', - }), - ('wheel', '0.34.2', { - 'source_urls': ['https://pypi.python.org/packages/source/w/wheel'], - }), - ('Rtree', '0.9.4', { - 'source_urls': ['https://pypi.python.org/packages/source/r/rtree'], - }), - ('ClusterShell', '1.8.3', { - 'modulename': 'ClusterShell', - 'source_urls': ['https://pypi.python.org/packages/source/c/ClusterShell'], - }), - ('cloudpickle', '1.5.0', { - 'source_urls': ['https://pypi.python.org/packages/source/c/cloudpickle'], - }), - ('Pillow', '7.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/Pillow'], - 'modulename': 'PIL', - }), - ('toolz', '0.10.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/toolz'], - }), - ('xvfbwrapper', '0.2.9', { - 'source_urls': ['https://pypi.python.org/packages/source/x/xvfbwrapper'], - }), - ('traits', '6.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/t/traits'], - }), - ('webencodings', '0.5.1', { - 'source_urls': ['https://pypi.python.org/packages/source/w/webencodings'], - }), - ('html5lib', '1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/h/html5lib'], - }), - ('isodate', '0.6.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/isodate'], - }), - ('rdflib', '5.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/r/rdflib'], - }), - ('SPARQLWrapper', '1.8.5', { - 'source_urls': ['https://pypi.python.org/packages/source/s/SPARQLWrapper'], - 'modulename': 'SPARQLWrapper', - }), - ('networkx', '2.4', { - 'source_urls': ['https://pypi.python.org/packages/source/n/networkx'], - 'source_tmpl': '%(name)s-%(version)s.tar.gz' - }), - ('prov', '1.5.3', { - 'source_urls': ['https://pypi.python.org/packages/source/p/prov'], - }), - ('simplejson', '3.17.2', { - 'source_urls': ['https://pypi.python.org/packages/source/s/simplejson'], - }), - ('configparser', '5.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/c/configparser'], - }), - ('pydot', '1.4.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pydot'], - }), - ('pydotplus', '2.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pydotplus'], - }), - ('olefile', '0.46', { - 'source_urls': ['https://pypi.python.org/packages/source/o/olefile'], - 'source_tmpl': '%(name)s-%(version)s.zip', - }), - ('argcomplete', '1.12.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/argcomplete'], - }), - ('grako', local_grakover, { - 'source_urls': ['https://pypi.python.org/packages/source/g/grako'], - 'source_tmpl': '%(name)s-%(version)s.zip', - }), - ('pytest-forked', '1.3.0', { - 'modulename': 'pytest_forked', - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest-forked'], - }), - ('apipkg', '1.5', { - 'source_urls': ['https://pypi.python.org/packages/source/a/apipkg'], - }), - ('execnet', '1.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/e/execnet'], - 
}), - ('pytest-xdist', '1.34.0', { - 'modulename': 'xdist', - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest-xdist'], - }), - ('TatSu', '5.5.0', { - 'source_tmpl': '%(name)s-%(version)s.zip', - 'source_urls': ['https://pypi.python.org/packages/source/t/tatsu'], - }), - ('psutil', '5.7.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/psutil'], - }), - ('pep8', '1.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pep8'], - }), - ('future', '0.18.2', { - 'source_urls': ['https://pypi.python.org/packages/source/f/future/'], - }), - ('hypothesis', '4.44.2', { - 'source_urls': ['https://pypi.python.org/packages/source/h/hypothesis/'], - }), - ('coverage', '5.1', { - 'source_urls': ['https://pypi.python.org/packages/source/c/coverage/'], - 'checksums': [('sha256', 'f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052')], - }), - ('msgpack', '0.6.2', { - 'source_urls': ['https://pypi.python.org/packages/source/m/msgpack/'], - 'checksums': [('sha256', 'ea3c2f859346fcd55fc46e96885301d9c2f7a36d453f5d8f2967840efa1e1830')], - }), - ('pyzmq', '18.1.0', { - 'modulename': 'zmq', - 'source_urls': ['https://pypi.python.org/packages/source/p/pyzmq/'], - 'checksums': [('sha256', '93f44739db69234c013a16990e43db1aa0af3cf5a4b8b377d028ff24515fbeb3')], - }), -] - -local_grako_egginfo_path = '%(installdir)s/lib/python3.8/site-packages/' -local_grako_egginfo_path += 'grako-%s-py3.8-linux-x86_64.egg/EGG-INFO/' % local_grakover - -postinstallcmds = [ - 'chmod o+r %s/*.txt' % local_grako_egginfo_path, - 'chmod o+r %s/not-zip-safe' % local_grako_egginfo_path, - 'chmod o+r %s/PKG-INFO' % local_grako_egginfo_path, - 'ln -s %(installdir)s/bin/python3-config %(installdir)s/bin/python-config', - # Pip version is not updated mid-stage, so add a var to stop it. 
- 'mkdir %(installdir)s/etc', - 'printf "[global]\ndisable-pip-version-check = True\n" > %(installdir)s/etc/pip.conf' -] - -buildopts = "PROFILE_TASK='-m test --pgo -x test_socket'" - -# Needed so stuff here is picked up in virtual environments (like in the Jupyter easyconfig) -modextrapaths = { - 'PYTHONPATH': 'lib/python%s/site-packages' % ".".join(version.split(".")[:-1]) -} - -modextravars = { - 'PIP_CONFIG_FILE': '%(installdir)s/etc/pip.conf' -} - -moduleclass = 'lang' diff --git a/Golden_Repo/p/Python/Python-3.8.5-GCCcore-9.3.0.eb b/Golden_Repo/p/Python/Python-3.8.5-GCCcore-9.3.0.eb deleted file mode 100644 index dc57e8d9b9b957121986e18d05a1b9d8352669f5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/Python/Python-3.8.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,481 +0,0 @@ -name = 'Python' -version = '3.8.5' - -homepage = 'http://python.org/' -description = """Python is a programming language that lets you work more quickly and integrate your systems -more effectively.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://www.python.org/ftp/%(namelower)s/%(version)s/'] -sources = [SOURCE_TGZ] - -# python needs bzip2 to build the bz2 package -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('libreadline', '8.0'), - ('ncurses', '6.2'), - ('SQLite', '3.32.3'), - ('Tk', '8.6.10'), - ('GMP', '6.2.0'), - ('XZ', '5.2.5'), - ('libxml2', '2.9.10'), - ('libxslt', '1.1.34'), - ('libffi', '3.3'), - ('libyaml', '0.2.5'), - ('PostgreSQL', '12.3'), - ('protobuf', '3.13.0'), - ('gflags', '2.2.2'), - ('libspatialindex', '1.9.3'), # Needed for rtree - ('libjpeg-turbo', '2.0.5'), -] - -builddependencies = [ - ('binutils', '2.34'), - # Needed just for unzipping zip files in systems without unzip - ('unzip', '6.0'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -exts_download_dep_fail = True - -local_grakover = '3.99.9' - -# symlink 'pip' command to 'pip3' that is included with Python installation -# required so we can update pip to version included in extensions, using pip -installopts = " && ln -s %(installdir)s/bin/pip3 %(installdir)s/bin/pip" - -# Updated 28.July.2020 -# order is important! 
-exts_list = [ - ('six', '1.15.0', { - 'source_urls': ['https://pypi.python.org/packages/source/s/six/'], - }), - ('toml', '0.10.1', { - 'source_urls': ['https://pypi.python.org/packages/source/t/toml/'], - }), - ('setuptools', '49.2.0', { - 'source_tmpl': '%(name)s-%(version)s.zip', - 'source_urls': ['https://pypi.python.org/packages/source/s/setuptools/'], - }), - ('pyparsing', '2.4.7', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pyparsing/'], - }), - ('packaging', '20.4', { - 'source_urls': ['https://pypi.python.org/packages/source/p/packaging'] - }), - ('appdirs', '1.4.4', { - 'source_urls': ['https://pypi.python.org/packages/source/a/appdirs'] - }), - ('certifi', '2020.6.20', { - 'source_urls': ['https://pypi.io/packages/source/c/certifi'], - }), - ('pip', '20.2.3', { - 'use_pip': False, - 'source_urls': ['https://pypi.python.org/packages/source/p/pip/'], - }), - ('nose', '1.3.7', { - 'source_urls': ['https://pypi.python.org/packages/source/n/nose/'], - }), - ('blist', '1.3.6', { - 'source_urls': ['https://pypi.python.org/packages/source/b/blist/'], - }), - ('paycheck', '1.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/paycheck/'], - 'patches': [ - 'paycheck-1.0.2_setup-open-README-utf8.patch', - ], - }), - ('pbr', '5.4.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pbr/'], - }), - ('lockfile', '0.12.2', { - 'source_urls': ['https://pypi.python.org/packages/source/l/lockfile/'], - }), - ('Cython', '0.29.21', { - 'source_urls': ['https://pypi.python.org/packages/source/c/Cython/'], - }), - ('python-dateutil', '2.8.1', { - 'modulename': 'dateutil', - 'source_urls': ['https://pypi.python.org/packages/source/p/python-dateutil/'], - }), - ('decorator', '4.4.2', { - 'source_urls': ['https://pypi.python.org/packages/source/d/decorator/'], - }), - ('liac-arff', '2.4.0', { - 'modulename': 'arff', - 'source_urls': ['https://pypi.python.org/packages/source/l/liac-arff/'], - }), - ('pycrypto', '2.6.1', { - 'modulename': 'Crypto', - 'source_urls': ['http://ftp.dlitz.net/pub/dlitz/crypto/pycrypto/'], - }), - ('ecdsa', '0.15', { - 'source_urls': ['https://pypi.python.org/packages/source/e/ecdsa/'], - }), - ('pyasn1', '0.4.8', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pyasn1/'], - }), - ('pycparser', '2.20', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pycparser'], - }), - ('cffi', '1.14.1', { - 'source_urls': ['https://pypi.python.org/packages/source/c/cffi'], - }), - ('enum34', '1.1.10', { - 'modulename': 'enum', - 'source_urls': ['https://pypi.python.org/packages/source/e/enum34'], - }), - ('ipaddress', '1.0.23', { - 'source_urls': ['https://pypi.python.org/packages/source/i/ipaddress/'], - }), - ('asn1crypto', '1.3.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/asn1crypto/'], - }), - ('idna', '2.10', { - 'source_urls': ['https://pypi.python.org/packages/source/i/idna/'], - }), - ('cryptography', '3.0', { - 'source_urls': ['https://pypi.python.org/packages/source/c/cryptography/'], - }), - ('PyNaCl', '1.4.0', { - 'modulename': 'nacl', - 'source_urls': ['https://pypi.python.org/packages/source/p/pynacl/'], - }), - ('bcrypt', '3.1.7', { - 'source_urls': ['https://pypi.python.org/packages/source/b/bcrypt/'], - }), - ('paramiko', '2.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/paramiko/'], - }), - ('netifaces', '0.10.9', { - 'source_urls': ['https://pypi.python.org/packages/source/n/netifaces'], - }), - ('netaddr', '0.8.0', { - 'source_urls': 
['https://pypi.python.org/packages/source/n/netaddr'], - }), - ('funcsigs', '1.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/f/funcsigs'], - }), - ('mock', '4.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/m/mock'], - }), - ('pytz', '2020.1', { - 'source_tmpl': '%(name)s-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/p/pytz'], - }), - ('bitstring', '3.1.7', { - 'source_tmpl': '%(name)s-%(version)s.tar.gz', - 'source_urls': ['https://pypi.python.org/packages/source/b/bitstring'], - }), - ('lxml', '4.5.2', { - 'source_urls': ['https://pypi.python.org/packages/source/l/lxml'], - }), - ('XlsxWriter', '1.2.9', { - 'modulename': 'xlsxwriter', - 'source_urls': ['https://pypi.python.org/packages/source/x/xlsxwriter'], - }), - ('Pygments', '2.6.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/Pygments/'], - 'modulename': 'pygments', - }), - ('backports.shutil_get_terminal_size', '1.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/b/backports.shutil_get_terminal_size/'], - }), - ('wcwidth', '0.2.5', { - 'source_urls': ['https://pypi.python.org/packages/source/w/wcwidth/'], - }), - ('prompt_toolkit', '3.0.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/prompt_toolkit/'], - }), - ('PyYAML', '5.3.1', { - 'modulename': 'yaml', - 'source_urls': ['https://pypi.python.org/packages/source/p/PyYAML/'], - }), - ('psycopg2', '2.8.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/psycopg2/'], - }), - ('protobuf', '3.13.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/protobuf/'], - 'modulename': 'google.protobuf', - }), - ('python-gflags', '3.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-gflags/'], - 'modulename': 'gflags', - }), - ('click', '7.1.2', { - 'modulename': 'click', - 'source_urls': ['https://pypi.python.org/packages/source/c/click'], - }), - ('itsdangerous', '1.1.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/itsdangerous'], - }), - ('Werkzeug', '1.0.1', { - 'source_urls': ['https://pypi.python.org/packages/source/w/werkzeug'], - 'modulename': 'werkzeug' - }), - ('MarkupSafe', '1.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/m/markupsafe'], - 'modulename': 'markupsafe' - }), - ('Jinja2', '2.11.2', { - 'source_urls': ['https://pypi.python.org/packages/source/j/jinja2'], - 'modulename': 'jinja2' - }), - ('Flask', '1.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/f/flask'], - 'modulename': 'flask' - }), - ('Mako', '1.1.3', { - 'source_urls': ['https://pypi.python.org/packages/source/m/mako'], - 'modulename': 'mako' - }), - ('py', '1.9.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/py'], - }), - ('setuptools_scm', '4.1.2', { - 'source_urls': ['https://pypi.python.org/packages/source/s/setuptools_scm'], - }), - ('attrs', '19.3.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/attrs'], - 'modulename': 'attr' - }), - ('more-itertools', '8.4.0', { - 'source_urls': ['https://pypi.python.org/packages/source/m/more-itertools'], - 'modulename': 'more_itertools' - }), - ('pluggy', '0.13.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pluggy'], - }), - ('atomicwrites', '1.4.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/atomicwrites'], - }), - ('scandir', '1.10.0', { - 'source_urls': ['https://pypi.python.org/packages/source/s/scandir'], - }), - ('pathlib2', '2.3.5', { - 'source_urls': 
['https://pypi.python.org/packages/source/p/pathlib2'], - }), - ('pytest', '5.4.3', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest'], - }), - ('pytest-runner', '5.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest-runner'], - 'modulename': 'ptr' - }), - ('ply', '3.11', { - 'source_urls': ['https://pypi.python.org/packages/source/p/ply'], - }), - ('ipython_genutils', '0.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/ipython_genutils'], - }), - ('traitlets', '4.3.3', { - 'source_urls': ['https://pypi.python.org/packages/source/t/traitlets'], - }), - ('ptyprocess', '0.6.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/ptyprocess'], - }), - ('pickleshare', '0.7.5', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pickleshare'], - }), - ('pexpect', '4.8.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pexpect'], - }), - ('simplegeneric', '0.8.1', { - 'source_tmpl': 'simplegeneric-0.8.1.zip', - 'source_urls': ['https://pypi.python.org/packages/source/s/simplegeneric'], - }), - ('parso', '0.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/parso'], - }), - ('jedi', '0.17.2', { - 'source_urls': ['https://pypi.python.org/packages/source/j/jedi'], - }), - ('backcall', '0.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/b/backcall'], - }), - ('ipython', '7.16.1', { - 'source_urls': ['https://pypi.python.org/packages/source/i/ipython'], - 'modulename': 'IPython', - }), - # Prereqs for jupyterhub - ('urllib3', '1.25.10', { - 'source_urls': ['https://pypi.python.org/packages/source/u/urllib3'], - }), - ('chardet', '3.0.4', { - 'source_urls': ['https://pypi.python.org/packages/source/c/chardet'], - }), - ('requests', '2.24.0', { - 'source_urls': ['https://pypi.python.org/packages/source/r/requests'], - }), - ('SQLAlchemy', '1.3.18', { - 'source_urls': ['https://pypi.python.org/packages/source/s/SQLAlchemy'], - 'modulename': 'sqlalchemy', - }), - ('python-editor', '1.0.4', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-editor'], - 'modulename': 'editor', - }), - ('alembic', '1.4.2', { - 'source_urls': ['https://pypi.python.org/packages/source/a/alembic'], - }), - ('vcversioner', '2.16.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/v/vcversioner'], - }), - ('pyrsistent', '0.16.0', { - 'source_urls': ['https://pypi.io/packages/source/p/pyrsistent'], - }), - ('jsonschema', '3.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/j/jsonschema'], - }), - ('python-oauth2', '1.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-oauth2'], - 'modulename': 'oauth2', - }), - ('wheel', '0.34.2', { - 'source_urls': ['https://pypi.python.org/packages/source/w/wheel'], - }), - ('Rtree', '0.9.4', { - 'source_urls': ['https://pypi.python.org/packages/source/r/rtree'], - }), - ('ClusterShell', '1.8.3', { - 'modulename': 'ClusterShell', - 'source_urls': ['https://pypi.python.org/packages/source/c/ClusterShell'], - }), - ('cloudpickle', '1.5.0', { - 'source_urls': ['https://pypi.python.org/packages/source/c/cloudpickle'], - }), - ('Pillow', '7.2.0', { - 'source_urls': ['https://pypi.python.org/packages/source/p/Pillow'], - 'modulename': 'PIL', - }), - ('toolz', '0.10.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/toolz'], - }), - ('xvfbwrapper', '0.2.9', { - 'source_urls': ['https://pypi.python.org/packages/source/x/xvfbwrapper'], - }), - ('traits', '6.1.1', { - 
'source_urls': ['https://pypi.python.org/packages/source/t/traits'], - }), - ('webencodings', '0.5.1', { - 'source_urls': ['https://pypi.python.org/packages/source/w/webencodings'], - }), - ('html5lib', '1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/h/html5lib'], - }), - ('isodate', '0.6.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/isodate'], - }), - ('rdflib', '5.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/r/rdflib'], - }), - ('SPARQLWrapper', '1.8.5', { - 'source_urls': ['https://pypi.python.org/packages/source/s/SPARQLWrapper'], - 'modulename': 'SPARQLWrapper', - }), - ('networkx', '2.4', { - 'source_urls': ['https://pypi.python.org/packages/source/n/networkx'], - 'source_tmpl': '%(name)s-%(version)s.tar.gz' - }), - ('prov', '1.5.3', { - 'source_urls': ['https://pypi.python.org/packages/source/p/prov'], - }), - ('simplejson', '3.17.2', { - 'source_urls': ['https://pypi.python.org/packages/source/s/simplejson'], - }), - ('configparser', '5.0.0', { - 'source_urls': ['https://pypi.python.org/packages/source/c/configparser'], - }), - ('pydot', '1.4.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pydot'], - }), - ('pydotplus', '2.0.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pydotplus'], - }), - ('olefile', '0.46', { - 'source_urls': ['https://pypi.python.org/packages/source/o/olefile'], - 'source_tmpl': '%(name)s-%(version)s.zip', - }), - ('argcomplete', '1.12.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/argcomplete'], - }), - ('grako', local_grakover, { - 'source_urls': ['https://pypi.python.org/packages/source/g/grako'], - 'source_tmpl': '%(name)s-%(version)s.zip', - }), - ('pytest-forked', '1.3.0', { - 'modulename': 'pytest_forked', - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest-forked'], - }), - ('apipkg', '1.5', { - 'source_urls': ['https://pypi.python.org/packages/source/a/apipkg'], - }), - ('execnet', '1.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/e/execnet'], - }), - ('pytest-xdist', '1.34.0', { - 'modulename': 'xdist', - 'source_urls': ['https://pypi.python.org/packages/source/p/pytest-xdist'], - }), - ('TatSu', '5.5.0', { - 'source_tmpl': '%(name)s-%(version)s.zip', - 'source_urls': ['https://pypi.python.org/packages/source/t/tatsu'], - }), - ('psutil', '5.7.2', { - 'source_urls': ['https://pypi.python.org/packages/source/p/psutil'], - }), - ('pep8', '1.7.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/pep8'], - }), - ('future', '0.18.2', { - 'source_urls': ['https://pypi.python.org/packages/source/f/future/'], - }), - ('hypothesis', '4.44.2', { - 'source_urls': ['https://pypi.python.org/packages/source/h/hypothesis/'], - }), - ('coverage', '5.1', { - 'source_urls': ['https://pypi.python.org/packages/source/c/coverage/'], - 'checksums': [('sha256', 'f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052')], - }), - ('msgpack', '0.6.2', { - 'source_urls': ['https://pypi.python.org/packages/source/m/msgpack/'], - 'checksums': [('sha256', 'ea3c2f859346fcd55fc46e96885301d9c2f7a36d453f5d8f2967840efa1e1830')], - }), - ('pyzmq', '18.1.0', { - 'modulename': 'zmq', - 'source_urls': ['https://pypi.python.org/packages/source/p/pyzmq/'], - 'checksums': [('sha256', '93f44739db69234c013a16990e43db1aa0af3cf5a4b8b377d028ff24515fbeb3')], - }), -] - -local_grako_egginfo_path = '%(installdir)s/lib/python3.8/site-packages/' -local_grako_egginfo_path += 'grako-%s-py3.8-linux-x86_64.egg/EGG-INFO/' % 
local_grakover - -postinstallcmds = [ - 'chmod o+r %s/*.txt' % local_grako_egginfo_path, - 'chmod o+r %s/not-zip-safe' % local_grako_egginfo_path, - 'chmod o+r %s/PKG-INFO' % local_grako_egginfo_path, - 'ln -s %(installdir)s/bin/python3-config %(installdir)s/bin/python-config', - # Pip version is not updated mid-stage, so add a var to stop it. - 'mkdir %(installdir)s/etc', - 'printf "[global]\ndisable-pip-version-check = True\n" > %(installdir)s/etc/pip.conf' -] - -buildopts = "PROFILE_TASK='-m test --pgo -x test_socket'" - -# Needed so stuff here is picked up in virtual environments (like in the Jupyter easyconfig) -modextrapaths = { - 'PYTHONPATH': 'lib/python%s/site-packages' % ".".join(version.split(".")[:-1]) -} - -modextravars = { - 'PIP_CONFIG_FILE': '%(installdir)s/etc/pip.conf' -} - -moduleclass = 'lang' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gompi-2020.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gompi-2020.eb deleted file mode 100644 index 06bd3df7f9eb7ef41eba455eb10c0ce61d603ad1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gompi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gompi-2021.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gompi-2021.eb deleted file mode 100644 index 6b2263ef4bd38e450c4935a1edae817bd382d59d..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gompi-2021.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. 
PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -preconfigopts = 'export FCFLAGS="-fallow-argument-mismatch $FCFLAGS" && ' -preconfigopts += 'export FFLAGS="-fallow-argument-mismatch $FFLAGS" && ' -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gpsmpi-2020.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gpsmpi-2020.eb deleted file mode 100644 index 68ea61fb305d2dd2d41077c76f8a0e4e6bbc7851..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gpsmpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gpsmpi-2021.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gpsmpi-2021.eb deleted file mode 100644 index 671de8c6e750af0777384076bb2374b80035b921..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-gpsmpi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. 
PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iimpi-2020.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iimpi-2020.eb deleted file mode 100644 index 377a1a670b744a0f689fe720f55c736badd3cab6..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iimpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iimpi-2021.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iimpi-2021.eb deleted file mode 100644 index 59e98fcad63839bd8faf79b5eab3360a634045cd..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iimpi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. 
PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iompi-2020.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iompi-2020.eb deleted file mode 100644 index 982ba9e407bb97743efafb3c20846db0c8953e26..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iompi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iompi-2021.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iompi-2021.eb deleted file mode 100644 index 6a2c896ba9a9ea2cb77025e76835e4b4d5812db1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-iompi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. 
PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2020-mt.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2020-mt.eb deleted file mode 100644 index 8d70a88ed8a72f0be41151ecf08955beb082f066..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2020-mt.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020-mt'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2020.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2020.eb deleted file mode 100644 index 223c1d7aa9ca2fa37178c07437eaec7280d56d27..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. 
PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2021.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2021.eb deleted file mode 100644 index 705269ce9307394a1de6953090952fd160b9ecda..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-ipsmpi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2020.1.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2020.1.eb deleted file mode 100644 index 5c17309643c2c283a5c83814c67075b97cd58ca8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2020.1.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. 
PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2020.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2020.eb deleted file mode 100644 index 0337cfa5847cadc102eea3e16240e9208c63292c..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2021.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2021.eb deleted file mode 100644 index 89dc178bb71d9f19436534f4f1b82f18d4a8b7d7..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-npsmpic-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. 
PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-nvompic-2021.eb b/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-nvompic-2021.eb deleted file mode 100644 index 79486486c84a49f9ce2ea1af39dabc53fce61a41..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel-netcdf/parallel-netcdf-1.12.1-nvompic-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel-netcdf' -version = '1.12.1' - -homepage = 'http://trac.mcs.anl.gov/projects/parallel-netcdf' -description = """PnetCDF is a library providing high-performance parallel I/O while still maintaining file-format -compatibility with Unidata's NetCDF, specifically the formats of CDF-1 and CDF-2. - -Although NetCDF supports parallel I/O starting from version 4, the files must be in HDF5 format. PnetCDF is currently -the only choice for carrying out parallel I/O on files that are in classic formats -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'nvompic', 'version': '2021'} -toolchainopts = {'usempi': True, 'pic': True} - -source_urls = ['https://parallel-netcdf.github.io/Release/'] -sources = ['pnetcdf-%(version)s.tar.gz'] -checksums = ['56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2'] - -configopts = '--enable-shared' - -builddependencies = [ - ('M4', '1.4.18'), -] - -sanity_check_paths = { - 'files': ['include/pnetcdf.h', 'include/pnetcdf.inc', 'include/pnetcdf.mod', 'lib/libpnetcdf.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/parallel/parallel-20201122-GCCcore-9.3.0.eb b/Golden_Repo/p/parallel/parallel-20201122-GCCcore-9.3.0.eb deleted file mode 100644 index 4e09e072ed8ef04d6759745a29e7657ef8f0accb..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/parallel/parallel-20201122-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'parallel' -version = '20201122' - -homepage = 'https://savannah.gnu.org/projects/parallel/' -description = """parallel: Build and execute shell commands in parallel""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_BZ2] - -builddependencies = [('binutils', '2.34')] -dependencies = [ - ('Perl', '5.32.0'), -] - -sanity_check_paths = { - 'files': ['bin/parallel'], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-Python-3.8.5.eb deleted file mode 100644 index 26e81b6aba0dfd5cf1e81c87516d0561069a4515..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 
'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14'), -] - -download_dep_fail = True - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-complex-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-complex-Python-3.8.5.eb deleted file mode 100644 index 7da6e1763cd413c40d151fc2db3c63dec6610ae1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-complex-Python-3.8.5.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-complex%s' % local_pysuffix - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." - -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14', '-complex'), -] - -download_dep_fail = True - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-int8-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-int8-Python-3.8.5.eb deleted file mode 100644 index dd85b79d19d2c7034e91dccfebdf1e99c36e7567..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-gpsmkl-2020-int8-Python-3.8.5.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-int8%s' % local_pysuffix - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." 
- -toolchain = {'name': 'gpsmkl', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14', '-int8'), -] - -download_dep_fail = True - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-Python-3.8.5.eb deleted file mode 100644 index 5fea091afe6ee5b1b41624b0c76ee2535900e4f8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-Python-3.8.5.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." - -toolchain = {'name': 'intel', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14'), -] - -download_dep_fail = True - -prebuildopts = 'export LDSHARED="-shared" && ' - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-complex-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-complex-Python-3.8.5.eb deleted file mode 100644 index 41cba2130dedc7763db20baf38bff36995fe5165..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-complex-Python-3.8.5.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-complex%s' % local_pysuffix - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." - -toolchain = {'name': 'intel', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14', '-complex'), -] - -download_dep_fail = True - -prebuildopts = 'export LDSHARED="-shared" && ' - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-int8-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-int8-Python-3.8.5.eb deleted file mode 100644 index d51b817291e1d542e08ab0d116d5731067c60904..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-2020-int8-Python-3.8.5.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-int8%s' % local_pysuffix - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." 
- -toolchain = {'name': 'intel', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14', '-int8'), -] - -download_dep_fail = True - -prebuildopts = 'export LDSHARED="-shared" && ' - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-Python-3.8.5.eb deleted file mode 100644 index 5e0d44859d605291c2f7b1e8940e37e3497ec9a8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-Python-3.8.5.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." - -toolchain = {'name': 'intel-para', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14'), -] - -download_dep_fail = True - -prebuildopts = 'export LDSHARED="-shared" && ' - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-complex-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-complex-Python-3.8.5.eb deleted file mode 100644 index 3380115332843bffc48fe7331ac41f3503599f84..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-complex-Python-3.8.5.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-complex%s' % local_pysuffix - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." 
- -toolchain = {'name': 'intel-para', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14', '-complex'), -] - -download_dep_fail = True - -prebuildopts = 'export LDSHARED="-shared" && ' - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-int8-Python-3.8.5.eb b/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-int8-Python-3.8.5.eb deleted file mode 100644 index f6f9fb27ea07c62f26f921d238a29bbd8ead000a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/petsc4py/petsc4py-3.13.0-intel-para-2020-int8-Python-3.8.5.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'petsc4py' -version = '3.13.0' -local_pysuffix = '-Python-%(pyver)s' -versionsuffix = '-int8%s' % local_pysuffix - -homepage = 'https://bitbucket.org/petsc/petsc4py' -description = "petsc4py are Python bindings for PETSc, the Portable, Extensible Toolchain for Scientific Computation." - -toolchain = {'name': 'intel-para', 'version': '2020'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', local_pysuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PETSc', '3.14', '-int8'), -] - -download_dep_fail = True - -prebuildopts = 'export LDSHARED="-shared" && ' - -req_py_majver = 3 -req_py_minver = 0 - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/pixman/pixman-0.40.0-GCCcore-10.3.0.eb b/Golden_Repo/p/pixman/pixman-0.40.0-GCCcore-10.3.0.eb deleted file mode 100644 index 49e3b7ce193c335b83a8a89e55c548568154f449..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pixman/pixman-0.40.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = "pixman" -version = '0.40.0' - -homepage = 'http://www.pixman.org/' -description = """Pixman is a low-level software library for pixel manipulation, providing features such as image -compositing and trapezoid rasterization. Important users of pixman are the cairo graphics library and the X server. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://cairographics.org/releases/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1') -] - -sanity_check_paths = { - 'files': ['lib/libpixman-1.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/pixman/pixman-0.40.0-GCCcore-9.3.0.eb b/Golden_Repo/p/pixman/pixman-0.40.0-GCCcore-9.3.0.eb deleted file mode 100644 index b0e6ccc06baed58cb93621aadc554138eec8daf2..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pixman/pixman-0.40.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = "pixman" -version = '0.40.0' - -homepage = 'http://www.pixman.org/' -description = """Pixman is a low-level software library for pixel manipulation, providing features such as image -compositing and trapezoid rasterization. Important users of pixman are the cairo graphics library and the X server. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://cairographics.org/releases/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34') -] - -sanity_check_paths = { - 'files': ['lib/libpixman-1.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Golden_Repo/p/pkg-config/pkg-config-0.29.2-GCCcore-10.3.0.eb b/Golden_Repo/p/pkg-config/pkg-config-0.29.2-GCCcore-10.3.0.eb deleted file mode 100644 index 75233c47b81b3567d97737c29bf1340cf8ed1919..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pkg-config/pkg-config-0.29.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'pkg-config' -version = '0.29.2' - -homepage = 'https://www.freedesktop.org/wiki/Software/pkg-config/' - -description = """ - pkg-config is a helper tool used when compiling applications and libraries. - It helps you insert the correct compiler options on the command line so an - application can use gcc -o test test.c `pkg-config --libs --cflags glib-2.0` - for instance, rather than hard-coding values on where to find glib (or other - libraries). -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://pkg-config.freedesktop.org/releases/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6fc69c01688c9458a57eb9a1664c9aba372ccda420a02bf4429fe610e7e7d591'] - -builddependencies = [('binutils', '2.36.1')] - -# don't use PAX, it might break. -tar_config_opts = True - -configopts = " --with-internal-glib" -configopts += " --with-pc-path=/usr/lib64/pkgconfig:/usr/share/pkgconfig" - -sanity_check_paths = { - 'files': ['bin/pkg-config'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/pkg-config/pkg-config-0.29.2-GCCcore-9.3.0.eb b/Golden_Repo/p/pkg-config/pkg-config-0.29.2-GCCcore-9.3.0.eb deleted file mode 100644 index 948e17b4d7142580837be6ff833a51bf0118bccc..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pkg-config/pkg-config-0.29.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'pkg-config' -version = '0.29.2' - -homepage = 'http://www.freedesktop.org/wiki/Software/pkg-config/' -description = """pkg-config is a helper tool used when compiling applications and libraries. It helps you insert the - correct compiler options on the command line so an application can use - gcc -o test test.c `pkg-config --libs --cflags glib-2.0` - for instance, rather than hard-coding values on where to find glib (or other libraries). -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://pkgconfig.freedesktop.org/releases/'] - -builddependencies = [('binutils', '2.34')] - -# don't use PAX, it might break. 
-tar_config_opts = True - -configopts = " --with-internal-glib" -configopts += " --with-pc-path=/usr/lib64/pkgconfig:/usr/share/pkgconfig" - -sanity_check_paths = { - 'files': ['bin/pkg-config'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/pkg-config/pkg-config-0.29.2.eb b/Golden_Repo/p/pkg-config/pkg-config-0.29.2.eb deleted file mode 100644 index 41a6f9866314d433acb8f5ad279fefc6c05f9143..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pkg-config/pkg-config-0.29.2.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'pkg-config' -version = '0.29.2' - -homepage = 'http://www.freedesktop.org/wiki/Software/pkg-config/' -description = """pkg-config is a helper tool used when compiling applications and libraries. It helps you insert the - correct compiler options on the command line so an application can use - gcc -o test test.c `pkg-config --libs --cflags glib-2.0` - for instance, rather than hard-coding values on where to find glib (or other libraries). -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://pkgconfig.freedesktop.org/releases/'] - -builddependencies = [('binutils', '2.34')] - -# don't use PAX, it might break. -tar_config_opts = True - -configopts = " --with-internal-glib" -# add pkg-config path of CentOS packages -configopts += " --with-pc-path=/usr/lib64/pkgconfig:/usr/share/pkgconfig" - -sanity_check_paths = { - 'files': ['bin/pkg-config'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/pkgconfig/pkgconfig-1.5.1-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/pkgconfig/pkgconfig-1.5.1-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index a3b520af5d2dd2d1b57281b39d3f69ba03a10225..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pkgconfig/pkgconfig-1.5.1-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'pkgconfig' -version = '1.5.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://github.com/matze/pkgconfig' -description = """pkgconfig is a Python module to interface with the pkg-config command line tool""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('pkg-config', '0.29.2'), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/pkgconfig/pkgconfig-1.5.1-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/pkgconfig/pkgconfig-1.5.1-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 8d0223f4db9301b2f9beef70ad32874327944fd1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pkgconfig/pkgconfig-1.5.1-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'pkgconfig' -version = '1.5.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://github.com/matze/pkgconfig' -description = """pkgconfig is a Python module to interface with the pkg-config command line tool""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('pkg-config', '0.29.2'), -] - -sanity_check_paths = { - 'files': [], - 
'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'data' diff --git a/Golden_Repo/p/popt/popt-1.16.eb b/Golden_Repo/p/popt/popt-1.16.eb deleted file mode 100644 index 26a3e9f43bd3181850ecc73712a76d18e55cd9a1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/popt/popt-1.16.eb +++ /dev/null @@ -1,22 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'popt' -version = '1.16' - -homepage = 'http://freecode.com/projects/popt' -description = """Popt is a C library for parsing command line parameters. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCE_TAR_GZ] -source_urls = ['http://rpm5.org/files/%(name)s/'] - -sanity_check_paths = { - 'files': ['include/%(name)s.h', ('lib/libpopt.a', 'lib64/libpopt.a'), ('lib/libpopt.so', 'lib64/libpopt.so')], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/protobuf-python/protobuf-python-3.13.0-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/protobuf-python/protobuf-python-3.13.0-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 664bab39432e214a7c0a1149d8da6f89af9b1a2c..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/protobuf-python/protobuf-python-3.13.0-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'protobuf-python' -version = '3.13.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/google/protobuf/' -description = """Python Protocol Buffers runtime library.""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://pypi.python.org/packages/source/p/protobuf'] -sources = [{'download_filename': 'protobuf-%(version)s.tar.gz', 'filename': SOURCE_TAR_GZ}] -checksums = ['6a82e0c8bb2bf58f606040cc5814e07715b2094caeba281e2e7d0b0e2e397db5'] - -dependencies = [ - ('Python', '3.8.5'), - ('protobuf', version) -] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -# Make sure protobuf is installed as a regular folder or it will not be found if -# other google packages are installed in other site-packages folders -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/google/protobuf'], -} - -options = {'modulename': 'google.protobuf'} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/protobuf/protobuf-3.12.4-GCCcore-10.3.0.eb b/Golden_Repo/p/protobuf/protobuf-3.12.4-GCCcore-10.3.0.eb deleted file mode 100644 index 4146f54439f306ebbbc8911e8cbd45ebbf0204b9..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/protobuf/protobuf-3.12.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'protobuf' -version = '3.12.4' - -homepage = 'https://github.com/google/protobuf/' -description = """Google Protocol Buffers""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/google/protobuf/archive/v%(version)s/'] -sources = ['%(name)s-%(version)s_jsc.tar.gz'] -checksums = ['512e5a674bf31f8b7928a64d8adf73ee67b8fe88339ad29adaa3b84dbaa570d8'] - -# This tarball contains gmock and gtest, which are otherwise downloaded by autogen.sh -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -start_dir = 'cmake' - -configopts = '-Dprotobuf_BUILD_TESTS=OFF -DBUILD_SHARED_LIBS=ON' - -sanity_check_paths = { - 'files': ['bin/protoc', 'lib64/libprotobuf.%s' % SHLIB_EXT], - 'dirs': [], -} - 
-moduleclass = 'devel' diff --git a/Golden_Repo/p/protobuf/protobuf-3.12.4-GCCcore-9.3.0.eb b/Golden_Repo/p/protobuf/protobuf-3.12.4-GCCcore-9.3.0.eb deleted file mode 100644 index 10036260ba2e005739d3ab34f3e60346f90d0047..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/protobuf/protobuf-3.12.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'protobuf' -version = '3.12.4' - -homepage = 'https://github.com/google/protobuf/' -description = """Google Protocol Buffers""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/google/protobuf/archive/v%(version)s/'] -sources = ['%(name)s-%(version)s_jsc.tar.gz'] -checksums = ['512e5a674bf31f8b7928a64d8adf73ee67b8fe88339ad29adaa3b84dbaa570d8'] - -# This tarball contains gmock and gtest, which are otherwise downloaded by autogen.sh -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -start_dir = 'cmake' - -configopts = '-Dprotobuf_BUILD_TESTS=OFF -DBUILD_SHARED_LIBS=ON' - -sanity_check_paths = { - 'files': ['bin/protoc', 'lib64/libprotobuf.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/protobuf/protobuf-3.13.0-GCCcore-10.3.0.eb b/Golden_Repo/p/protobuf/protobuf-3.13.0-GCCcore-10.3.0.eb deleted file mode 100644 index 109e003e75a76015f039bb7f49d04a4d730a4c86..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/protobuf/protobuf-3.13.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'protobuf' -version = '3.13.0' - -homepage = 'https://github.com/google/protobuf/' -description = """Google Protocol Buffers""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/google/protobuf/archive/v%(version)s/'] -sources = ['%(name)s-%(version)s_jsc.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -start_dir = 'cmake' - -configopts = '-Dprotobuf_BUILD_TESTS=OFF -DBUILD_SHARED_LIBS=ON' - -sanity_check_paths = { - 'files': ['bin/protoc', 'lib64/libprotobuf.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/protobuf/protobuf-3.13.0-GCCcore-9.3.0.eb b/Golden_Repo/p/protobuf/protobuf-3.13.0-GCCcore-9.3.0.eb deleted file mode 100644 index 700b68c238bfc2875a9ee93e1056a6e8a5e915ca..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/protobuf/protobuf-3.13.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'protobuf' -version = '3.13.0' - -homepage = 'https://github.com/google/protobuf/' -description = """Google Protocol Buffers""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/google/protobuf/archive/v%(version)s/'] -sources = ['%(name)s-%(version)s_jsc.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -start_dir = 'cmake' - -configopts = '-Dprotobuf_BUILD_TESTS=OFF -DBUILD_SHARED_LIBS=ON' - -sanity_check_paths = { - 'files': ['bin/protoc', 'lib64/libprotobuf.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/p/pscom/pscom-5.4-default-CUDA-11.3.eb b/Golden_Repo/p/pscom/pscom-5.4-default-CUDA-11.3.eb deleted file mode 100644 index 
46cde025d23945b6e4a3f9943afbbd641ff99391..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pscom/pscom-5.4-default-CUDA-11.3.eb +++ /dev/null @@ -1,52 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'pscom' -local_cuda_ver = '11.3' -# Create drop-in replacement version that ensures overriding behaviour -version = f'5.4-default-CUDA-{local_cuda_ver}' -local_realversion = '5.4.7-1' -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ParaStation/%(name)s/archive/'] -sources = ['%s.tar.gz' % local_realversion] - -builddependencies = [ - ('binutils', '2.36.1'), - ('popt', '1.16'), - ('CUDA', local_cuda_ver), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('UCX', '1.10.1'), -] - -build_type = 'RelWithDebInfo' - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '-DCUDA_ENABLED=ON' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % local_realversion, -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/pscom/pscom-5.4-default.eb b/Golden_Repo/p/pscom/pscom-5.4-default.eb deleted file mode 100644 index f1307cec0d00d169e56a61c31a27638025d708d5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pscom/pscom-5.4-default.eb +++ /dev/null @@ -1,49 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'pscom' -# Create drop-in replacement version that ensures overriding behaviour -version = "5.4-default" -local_realversion = "5.4.6-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ParaStation/%(name)s/archive/'] -sources = ['%s.tar.gz' % local_realversion] - -builddependencies = [ - # Fails with binutils 2.34 - ('binutils', '2.32'), - ('popt', '1.16'), - ('CUDA', '11.0'), -] - -dependencies = [ - ('UCX', '1.8.1'), -] - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '--enable-cuda --enable-ucp' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % local_realversion, -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/pscom/pscom-5.4.6-1.eb b/Golden_Repo/p/pscom/pscom-5.4.6-1.eb deleted file mode 100644 index a0a61bfda8223ba64e84cc94be72c7a7d67ee37d..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pscom/pscom-5.4.6-1.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'pscom' -version = "5.4.6-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ParaStation/%(name)s/archive/'] -sources = ['%s.tar.gz' % version] - -builddependencies = [ - # Fails with binutils 2.34 - ('binutils', '2.32'), - ('popt', '1.16'), - ('CUDA', '11.0'), -] - -dependencies = [ - ('UCX', '1.8.1'), -] - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '--enable-cuda --enable-ucp' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % version, -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/pscom/pscom-5.4.7-1.eb b/Golden_Repo/p/pscom/pscom-5.4.7-1.eb deleted file mode 100644 index 05b339ecce26accee7c60c30b0d1c73c588c5299..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pscom/pscom-5.4.7-1.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'pscom' -version = "5.4.7-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ParaStation/%(name)s/archive/'] -sources = ['%%(name)s-%s.tar.bz2' % version] - -builddependencies = [ - ('popt', '1.16'), - ('CUDA', '11.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('UCX', '1.9.0'), -] - -build_type = 'RelWithDebInfo' - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '-DCUDA_ENABLED=ON' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % version, -} - -moduleclass = 'tools' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb deleted file mode 100644 index 0e7bbd0c910d76b91bd4be86146e1af73d533b00..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default ParaStationMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb deleted file mode 100644 index 7ea994d41e0c1673623658970add8d400af09e1e..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-UCX-plain.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-UCX-plain.eb deleted file mode 100644 index f60132e1fa8855e8aad49f981013b664791f9ce7..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-UCX-plain.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'UCX-plain' - -homepage = '' -description = 'This is a module to load the ParaStationMPI configuration. It enables UCX, without further tuning' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_OPENIB': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb deleted file mode 100644 index bb11864402e5492fba08fd2660ff7a30d7b40fc5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default ParaStationMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb deleted file mode 100644 index 879c26739d02b151d0d434045bde928d852e4981..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-UCX-plain.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-UCX-plain.eb deleted file mode 100644 index ba74304a955e515bdc7707eef7bc005f3514308a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-UCX-plain.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'UCX-plain' - -homepage = '' -description = 'This is a module to load the ParaStationMPI configuration. 
It enables UCX, without further tuning' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_OPENIB': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-plain.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-plain.eb deleted file mode 100644 index f6bc981a3d5c0a014f8175fae51ccbb898a63af9..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-mt-plain.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default ParaStationMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_HARD_ABORT': '1', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-plain.eb b/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-plain.eb deleted file mode 100644 index 6e5f31e734db6b0e5d80047b01eb45c6a2886f88..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi-settings/psmpi-settings-5.4-plain.eb +++ /dev/null @@ -1,21 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default ParaStationMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_HARD_ABORT': '1', -} - -moduleclass = 'system' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.6-1_ime.patch b/Golden_Repo/p/psmpi/psmpi-5.4.6-1_ime.patch deleted file mode 100644 index 20478287dfbcf000d53736ab4d8512c2403ed5df..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.6-1_ime.patch +++ /dev/null @@ -1,47206 +0,0 @@ -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/aclocal.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/aclocal.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/aclocal.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/aclocal.m4 2020-07-16 11:30:46.936365369 +0200 -@@ -76,8 +76,7 @@ - : ${AR=ar} - - AC_CACHE_CHECK([the archiver ($AR) interface], [am_cv_ar_interface], -- [AC_LANG_PUSH([C]) -- am_cv_ar_interface=ar -+ [am_cv_ar_interface=ar - AC_COMPILE_IFELSE([AC_LANG_SOURCE([[int some_variable = 0;]])], - [am_ar_try='$AR cru libconftest.a conftest.$ac_objext >&AS_MESSAGE_LOG_FD' - AC_TRY_EVAL([am_ar_try]) -@@ -94,7 +93,7 @@ - fi - rm -f conftest.lib libconftest.a - ]) -- AC_LANG_POP([C])]) -+ ]) - - case $am_cv_ar_interface in - ar) -@@ -163,9 +162,10 @@ - # configured tree to be moved without reconfiguration. - - AC_DEFUN([AM_AUX_DIR_EXPAND], --[AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl --# Expand $ac_aux_dir to an absolute path. --am_aux_dir=`cd "$ac_aux_dir" && pwd` -+[dnl Rely on autoconf to set up CDPATH properly. -+AC_PREREQ([2.50])dnl -+# expand $ac_aux_dir to an absolute path -+am_aux_dir=`cd $ac_aux_dir && pwd` - ]) - - # AM_CONDITIONAL -*- Autoconf -*- -@@ -467,12 +467,6 @@ - # This macro actually does too much. Some checks are only needed if - # your package does certain things. But this isn't really a big deal. - --dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O. 
--m4_define([AC_PROG_CC], --m4_defn([AC_PROG_CC]) --[_AM_PROG_CC_C_O --]) -- - # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) - # AM_INIT_AUTOMAKE([OPTIONS]) - # ----------------------------------------------- -@@ -548,8 +542,8 @@ - # <https://lists.gnu.org/archive/html/automake/2012-07/msg00001.html> - # <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html> - AC_SUBST([mkdir_p], ['$(MKDIR_P)']) --# We need awk for the "check" target (and possibly the TAP driver). The --# system "awk" is bad on some platforms. -+# We need awk for the "check" target. The system "awk" is bad on -+# some platforms. - AC_REQUIRE([AC_PROG_AWK])dnl - AC_REQUIRE([AC_PROG_MAKE_SET])dnl - AC_REQUIRE([AM_SET_LEADING_DOT])dnl -@@ -581,51 +575,6 @@ - AC_CONFIG_COMMANDS_PRE(dnl - [m4_provide_if([_AM_COMPILER_EXEEXT], - [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl -- --# POSIX will say in a future version that running "rm -f" with no argument --# is OK; and we want to be able to make that assumption in our Makefile --# recipes. So use an aggressive probe to check that the usage we want is --# actually supported "in the wild" to an acceptable degree. --# See automake bug#10828. --# To make any issue more visible, cause the running configure to be aborted --# by default if the 'rm' program in use doesn't match our expectations; the --# user can still override this though. --if rm -f && rm -fr && rm -rf; then : OK; else -- cat >&2 <<'END' --Oops! -- --Your 'rm' program seems unable to run without file operands specified --on the command line, even when the '-f' option is present. This is contrary --to the behaviour of most rm programs out there, and not conforming with --the upcoming POSIX standard: <http://austingroupbugs.net/view.php?id=542> -- --Please tell bug-automake@gnu.org about your system, including the value --of your $PATH and any error possibly output before this message. This --can help us improve future automake versions. -- --END -- if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then -- echo 'Configuration will proceed anyway, since you have set the' >&2 -- echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 -- echo >&2 -- else -- cat >&2 <<'END' --Aborting the configuration process, to ensure you take notice of the issue. -- --You can download and install GNU coreutils to get an 'rm' implementation --that behaves properly: <https://www.gnu.org/software/coreutils/>. -- --If you want to complete the configuration process using your problematic --'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM --to "yes", and re-run configure. -- --END -- AC_MSG_ERROR([Your 'rm' program is bad, sorry.]) -- fi --fi --dnl The trailing newline in this macro's definition is deliberate, for --dnl backward compatibility and to allow trailing 'dnl'-style comments --dnl after the AM_INIT_AUTOMAKE invocation. See automake bug#16841. - ]) - - dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not -@@ -634,6 +583,7 @@ - m4_define([_AC_COMPILER_EXEEXT], - m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) - -+ - # When config.status generates a header, we must update the stamp-h file. - # This file resides in the same directory as the config header - # that is generated. The stamp files are numbered to have different names. -@@ -666,7 +616,7 @@ - # Define $install_sh. 
- AC_DEFUN([AM_PROG_INSTALL_SH], - [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl --if test x"${install_sh+set}" != xset; then -+if test x"${install_sh}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; -@@ -774,6 +724,38 @@ - AC_SUBST([am__include])]) - AC_SUBST([am__quote])]) - -+# Copyright (C) 1999-2013 Free Software Foundation, Inc. -+# -+# This file is free software; the Free Software Foundation -+# gives unlimited permission to copy and/or distribute it, -+# with or without modifications, as long as this notice is preserved. -+ -+# AM_PROG_CC_C_O -+# -------------- -+# Like AC_PROG_CC_C_O, but changed for automake. -+AC_DEFUN([AM_PROG_CC_C_O], -+[AC_REQUIRE([AC_PROG_CC_C_O])dnl -+AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -+AC_REQUIRE_AUX_FILE([compile])dnl -+# FIXME: we rely on the cache variable name because -+# there is no other way. -+set dummy $CC -+am_cc=`echo $[2] | sed ['s/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/']` -+eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -+if test "$am_t" != yes; then -+ # Losing compiler, so override with the script. -+ # FIXME: It is wrong to rewrite CC. -+ # But if we don't then we get into trouble of one sort or another. -+ # A longer-term fix would be to have automake use am__CC in this case, -+ # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" -+ CC="$am_aux_dir/compile $CC" -+fi -+dnl Make sure AC_PROG_CC is never called again, or it will override our -+dnl setting of CC. -+m4_define([AC_PROG_CC], -+ [m4_fatal([AC_PROG_CC cannot be called after AM_PROG_CC_C_O])]) -+]) -+ - # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- - - # Copyright (C) 1997-2018 Free Software Foundation, Inc. -@@ -843,71 +825,6 @@ - # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. - AC_DEFUN([_AM_IF_OPTION], - [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) -- --# Copyright (C) 1999-2018 Free Software Foundation, Inc. --# --# This file is free software; the Free Software Foundation --# gives unlimited permission to copy and/or distribute it, --# with or without modifications, as long as this notice is preserved. -- --# _AM_PROG_CC_C_O --# --------------- --# Like AC_PROG_CC_C_O, but changed for automake. We rewrite AC_PROG_CC --# to automatically call this. --AC_DEFUN([_AM_PROG_CC_C_O], --[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl --AC_REQUIRE_AUX_FILE([compile])dnl --AC_LANG_PUSH([C])dnl --AC_CACHE_CHECK( -- [whether $CC understands -c and -o together], -- [am_cv_prog_cc_c_o], -- [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])]) -- # Make sure it works both with $CC and with simple cc. -- # Following AC_PROG_CC_C_O, we do the test twice because some -- # compilers refuse to overwrite an existing .o file with -o, -- # though they will create one. -- am_cv_prog_cc_c_o=yes -- for am_i in 1 2; do -- if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \ -- && test -f conftest2.$ac_objext; then -- : OK -- else -- am_cv_prog_cc_c_o=no -- break -- fi -- done -- rm -f core conftest* -- unset am_i]) --if test "$am_cv_prog_cc_c_o" != yes; then -- # Losing compiler, so override with the script. -- # FIXME: It is wrong to rewrite CC. -- # But if we don't then we get into trouble of one sort or another. -- # A longer-term fix would be to have automake use am__CC in this case, -- # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" -- CC="$am_aux_dir/compile $CC" --fi --AC_LANG_POP([C])]) -- --# For backward compatibility. 
--AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])]) -- --# Copyright (C) 2001-2018 Free Software Foundation, Inc. --# --# This file is free software; the Free Software Foundation --# gives unlimited permission to copy and/or distribute it, --# with or without modifications, as long as this notice is preserved. -- --# AM_RUN_LOG(COMMAND) --# ------------------- --# Run COMMAND, save the exit status in ac_status, and log it. --# (This has been adapted from Autoconf's _AC_RUN_LOG macro.) --AC_DEFUN([AM_RUN_LOG], --[{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD -- ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD -- ac_status=$? -- echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD -- (exit $ac_status); }]) -- - # Check to make sure that the build environment is sane. -*- Autoconf -*- - - # Copyright (C) 1996-2018 Free Software Foundation, Inc. -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.c psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.c ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.c 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.c 2020-07-16 10:45:43.396074000 +0200 -@@ -13,8 +13,8 @@ - - struct ADIOI_Fns_struct ADIO_IME_operations = { - ADIOI_IME_Open, /* Open */ -- ADIOI_SCALEABLE_OpenColl, /* OpenColl */ /*XXX*/ -- ADIOI_IME_ReadContig, /* ReadContig */ -+ ADIOI_GEN_OpenColl, /* OpenColl */ -+ ADIOI_IME_ReadContig, /* ReadContig */ - ADIOI_IME_WriteContig, /* WriteContig */ - ADIOI_GEN_ReadStridedColl, /* ReadStridedColl */ - ADIOI_GEN_WriteStridedColl, /* WriteStridedColl */ -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.c psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.c ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.c 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.c 2020-07-16 10:45:43.398781000 +0200 -@@ -61,24 +61,38 @@ - MPI_Attr_put(MPI_COMM_SELF, ADIOI_IME_Initialized, (void *) 0); - } - --/* Return an IME-compatible filename (add 'ime:' prefix). -- * New filename must be free'd by the user */ --char *ADIOI_IME_Add_prefix(const char *filename) -+/** -+ * Return an IME-compatible filename -+ * An absolute BFS path will get added the 'ime:' prefix. 
-+ * A path on ime-fuse will be converted to a relative ime path, -+ * with the ime:/ prefix -+ * The returned filename must be free'd by the user -+ */ -+char *ADIOI_IME_Convert_filename(const char *filename) - { -- static char myname[] = "ADIOI_IME_ADD_PREFIX"; -- size_t f_len = strlen(filename) + 1; -- char *ime_filename = ADIOI_Malloc(f_len + ADIOI_IME_PREFIX_LEN); -+ static char myname[] = "ADIOI_IME_CONVERT_FILENAME"; - -- if (!ime_filename) { -+#if (IME_NATIVE_API_VERSION >= 131) -+ bool is_fuse = ime_native_is_fuse_path_and_convert(filename, NULL); -+ if (is_fuse) { -+ return ADIOI_Strdup(filename); -+ } -+#endif - -+ size_t f_len = strlen(filename) + 1; -+ char *ime_filename = ADIOI_Malloc(f_len + IME_FILE_PREFIX_LEN_NO_FWD_SLASH); -+ if (!ime_filename) { - MPIO_Err_create_code(MPI_SUCCESS, - MPIR_ERR_FATAL, -- myname, __LINE__, MPI_ERR_UNKNOWN, "Error allocating memory", 0); -+ myname, __LINE__, MPI_ERR_UNKNOWN, -+ "Error allocating memory", 0); - - return NULL; - } - -- ADIOI_Strncpy(ime_filename, ADIOI_IME_PREFIX, ADIOI_IME_PREFIX_LEN); -- ADIOI_Strncpy((ime_filename + ADIOI_IME_PREFIX_LEN), filename, f_len); -+ ADIOI_Strncpy(ime_filename, DEFAULT_IME_PREFIX_NO_FWD_SLASH, -+ IME_FILE_PREFIX_LEN_NO_FWD_SLASH); -+ ADIOI_Strncpy((ime_filename + IME_FILE_PREFIX_LEN_NO_FWD_SLASH), -+ filename, f_len); - return ime_filename; - } -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.h psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.h ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.h 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_common.h 2020-07-16 10:45:43.400059000 +0200 -@@ -20,5 +20,5 @@ - void ADIOI_IME_End(int *error_code); - int ADIOI_IME_End_call(MPI_Comm comm, int keyval, void *attribute_val, void *extra_state); - --char *ADIOI_IME_Add_prefix(const char *filename); -+char *ADIOI_IME_Convert_filename(const char *filename); - #endif /* AD_IME_COMMON_H_INCLUDED */ -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_delete.c psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_delete.c ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_delete.c 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_delete.c 2020-07-16 10:45:43.401254000 +0200 -@@ -16,7 +16,7 @@ - int ret; - static char myname[] = "ADIOI_IME_DELETE"; - -- char *ime_filename = ADIOI_IME_Add_prefix(filename); -+ char *ime_filename = ADIOI_IME_Convert_filename(filename); - ret = ime_native_unlink(ime_filename); - ADIOI_Free(ime_filename); - if (ret) -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.h psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.h ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.h 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime.h 2020-07-16 10:45:43.397428000 +0200 -@@ -10,9 +10,7 @@ - #define AD_IME_H_INCLUDED - - #include "adio.h" --#ifdef HAVE_IME_NATIVE_H - #include "ime_native.h" --#endif - - #define ADIOI_IME_PREFIX "ime:" - #define ADIOI_IME_PREFIX_LEN (sizeof(ADIOI_IME_PREFIX) - 1) -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_open.c psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_open.c ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_open.c 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/ad_ime/ad_ime_open.c 2020-07-16 10:45:43.402536000 +0200 -@@ 
-71,7 +71,7 @@ - return; - } - -- ime_fs->ime_filename = ADIOI_IME_Add_prefix(fd->filename); -+ ime_fs->ime_filename = ADIOI_IME_Convert_filename(fd->filename); - - /* all processes open the file */ - ret = ime_native_open(ime_fs->ime_filename, amode, perm); -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/common/ad_fstype.c psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/common/ad_fstype.c ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/common/ad_fstype.c 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/common/ad_fstype.c 2020-07-16 10:45:51.383949936 +0200 -@@ -87,6 +87,20 @@ - #define GPFS_SUPER_MAGIC 0x47504653 - #endif - -+# if defined(ROMIO_IME) -+/* fuse super magic is somehow missing in system includes */ -+# if !defined(FUSE_SUPER_MAGIC) -+# define FUSE_SUPER_MAGIC 0x65735546 -+# endif -+# include "ime_native.h" -+# if (IME_NATIVE_API_VERSION) >= 131 -+# include "ime_ioctl.h" -+ -+/* Disable auto-switching from posix to IME */ -+#define IME_NO_AUTO_NATIVE "IME_NO_AUTO_NATIVE" -+# endif -+# endif /* ROMIO_IME */ -+ - #ifdef ROMIO_HAVE_STRUCT_STATVFS_WITH_F_BASETYPE - #ifdef HAVE_SYS_STATVFS_H - #include <sys/statvfs.h> -@@ -419,6 +433,39 @@ - } - #endif - -+#ifdef FUSE_SUPER_MAGIC -+ if (fsbuf.f_type == FUSE_SUPER_MAGIC) { -+ /* fuse does not allow to override FUSE_SUPER_MAGIC -+ * Any file system that is fused based needs to hook in -+ * here and use its own ioctl. -+ */ -+ #if defined ROMIO_IME && defined IM_FIOC_GET_F_TYPE -+ char *dir; -+ ADIO_FileSysType_parentdir(filename, &dir); -+ int fd = open(dir, O_RDONLY); -+ if (fd == -1) -+ { -+ /* dir should typically work, but try to fail back to -+ * filename if it somehow failed -+ */ -+ fd = open(filename, O_RDONLY); -+ } -+ -+ if (fd >= 0) { -+ uint32_t f_type; -+ int rc = ioctl(fd, IM_FIOC_GET_F_TYPE, &f_type); -+ close(fd); -+ if (rc == 0 && f_type == IME_SUPER_MAGIC && -+ getenv(IME_NO_AUTO_NATIVE) == NULL) { -+ *fstype = ADIO_IME; -+ return; -+ } -+ } -+ #endif /* ROMIO_IME */ -+ } -+#endif /* FUSE_SUPER_MAGIC */ -+ -+ - #endif /*ROMIO_HAVE_STRUCT_STATFS_WITH_F_TYPE */ - - #ifdef ROMIO_UFS -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/include/romioconf.h.in psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/include/romioconf.h.in ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/adio/include/romioconf.h.in 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/adio/include/romioconf.h.in 2020-07-16 10:48:35.333084119 +0200 -@@ -244,7 +244,8 @@ - /* Define if int smaller than pointer */ - #undef INT_LT_POINTER - --/* Define to the sub-directory where libtool stores uninstalled libraries. */ -+/* Define to the sub-directory in which libtool stores uninstalled libraries. -+ */ - #undef LT_OBJDIR - - /* Define if using MPICH */ -@@ -292,6 +293,9 @@ - /* Define if usleep needs a declaration */ - #undef NEEDS_USLEEP_DECL - -+/* Define to 1 if your C compiler doesn't accept -c and -o together. */ -+#undef NO_MINUS_C_MINUS_O -+ - /* Define if no MPI type is contig */ - #undef NO_MPI_SGI_type_is_contig - -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/compile psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/compile ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/compile 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/compile 2020-07-16 10:48:35.336090000 +0200 -@@ -255,8 +255,7 @@ - echo "compile $scriptversion" - exit $? 
- ;; -- cl | *[/\\]cl | cl.exe | *[/\\]cl.exe | \ -- icl | *[/\\]icl | icl.exe | *[/\\]icl.exe ) -+ cl | *[/\\]cl | cl.exe | *[/\\]cl.exe ) - func_cl_wrapper "$@" # Doesn't return... - ;; - esac -@@ -343,6 +342,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/config.guess psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/config.guess ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/config.guess 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/config.guess 2020-07-16 10:48:35.342961565 +0200 -@@ -610,9 +610,8 @@ - else - IBM_ARCH=powerpc - fi -- if [ -x /usr/bin/lslpp ] ; then -- IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | -- awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` -+ if [ -x /usr/bin/oslevel ] ; then -+ IBM_REV=`/usr/bin/oslevel` - else - IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" - fi -@@ -711,12 +710,12 @@ - # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess - # => hppa64-hp-hpux11.23 - -- if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | -+ if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | - grep -q __LP64__ - then -- HP_ARCH=hppa2.0w -+ HP_ARCH="hppa2.0w" - else -- HP_ARCH=hppa64 -+ HP_ARCH="hppa64" - fi - fi - echo "$HP_ARCH"-hp-hpux"$HPUX_REV" -@@ -1125,7 +1124,7 @@ - # uname -m prints for DJGPP always 'pc', but it prints nothing about - # the processor, so we play safe by assuming i586. - # Note: whatever this is, it MUST be the same as what config.sub -- # prints for the "djgpp" host, or else GDB configure will decide that -+ # prints for the "djgpp" host, or else GDB configury will decide that - # this is a cross-build. - echo i586-pc-msdosdjgpp - exit ;; -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/config.sub psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/config.sub ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/config.sub 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/config.sub 2020-07-16 10:48:35.348164542 +0200 -@@ -25,7 +25,7 @@ - # of the GNU General Public License, version 3 ("GPLv3"). - - --# Please send patches to <config-patches@gnu.org>. -+# Please send patches with a ChangeLog entry to config-patches@gnu.org. - # - # Configuration subroutine to validate and canonicalize a configuration type. - # Supply the specified configuration type as an argument. -@@ -53,7 +53,8 @@ - me=`echo "$0" | sed -e 's,.*/,,'` - - usage="\ --Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS -+Usage: $0 [OPTION] CPU-MFR-OPSYS -+ $0 [OPTION] ALIAS - - Canonicalize a configuration name. 
- -@@ -116,8 +117,8 @@ - case $maybe_os in - nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ - linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ -- knetbsd*-gnu* | netbsd*-gnu* | netbsd*-eabi* | \ -- kopensolaris*-gnu* | cloudabi*-eabi* | \ -+ knetbsd*-gnu* | netbsd*-gnu* | \ -+ kopensolaris*-gnu* | \ - storm-chaos* | os2-emx* | rtmk-nova*) - os=-$maybe_os - basic_machine=`echo "$1" | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` -@@ -251,18 +252,16 @@ - | arc | arceb \ - | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ - | avr | avr32 \ -- | ba \ - | be32 | be64 \ - | bfin \ -- | c4x | c8051 | clipper \ -+ | c4x | clipper \ - | d10v | d30v | dlx | dsp16xx \ -- | e2k | epiphany \ -- | fido | fr30 | frv | ft32 \ -+ | epiphany \ -+ | fido | fr30 | frv \ - | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ - | hexagon \ -- | i370 | i860 | i960 | ia16 | ia64 \ -+ | i370 | i860 | i960 | ia64 \ - | ip2k | iq2000 \ -- | k1om \ - | le32 | le64 \ - | lm32 \ - | m32c | m32r | m32rle | m68000 | m68k | m88k \ -@@ -280,10 +279,8 @@ - | mips64vr5900 | mips64vr5900el \ - | mipsisa32 | mipsisa32el \ - | mipsisa32r2 | mipsisa32r2el \ -- | mipsisa32r6 | mipsisa32r6el \ - | mipsisa64 | mipsisa64el \ - | mipsisa64r2 | mipsisa64r2el \ -- | mipsisa64r6 | mipsisa64r6el \ - | mipsisa64sb1 | mipsisa64sb1el \ - | mipsisa64sr71k | mipsisa64sr71kel \ - | mipsr5900 | mipsr5900el \ -@@ -326,10 +323,7 @@ - c6x) - basic_machine=tic6x-unknown - ;; -- leon|leon[3-9]) -- basic_machine=sparc-$basic_machine -- ;; -- m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) -+ m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) - basic_machine=$basic_machine-unknown - os=-none - ;; -@@ -374,20 +368,18 @@ - | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ - | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ - | avr-* | avr32-* \ -- | ba-* \ - | be32-* | be64-* \ - | bfin-* | bs2000-* \ - | c[123]* | c30-* | [cjt]90-* | c4x-* \ -- | c8051-* | clipper-* | craynv-* | cydra-* \ -+ | clipper-* | craynv-* | cydra-* \ - | d10v-* | d30v-* | dlx-* \ -- | e2k-* | elxsi-* \ -+ | elxsi-* \ - | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ - | h8300-* | h8500-* \ - | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ - | hexagon-* \ -- | i*86-* | i860-* | i960-* | ia16-* | ia64-* \ -+ | i*86-* | i860-* | i960-* | ia64-* \ - | ip2k-* | iq2000-* \ -- | k1om-* \ - | le32-* | le64-* \ - | lm32-* \ - | m32c-* | m32r-* | m32rle-* \ -@@ -407,10 +399,8 @@ - | mips64vr5900-* | mips64vr5900el-* \ - | mipsisa32-* | mipsisa32el-* \ - | mipsisa32r2-* | mipsisa32r2el-* \ -- | mipsisa32r6-* | mipsisa32r6el-* \ - | mipsisa64-* | mipsisa64el-* \ - | mipsisa64r2-* | mipsisa64r2el-* \ -- | mipsisa64r6-* | mipsisa64r6el-* \ - | mipsisa64sb1-* | mipsisa64sb1el-* \ - | mipsisa64sr71k-* | mipsisa64sr71kel-* \ - | mipsr5900-* | mipsr5900el-* \ -@@ -422,19 +412,16 @@ - | nios-* | nios2-* | nios2eb-* | nios2el-* \ - | none-* | np1-* | ns16k-* | ns32k-* \ - | open8-* \ -- | or1k*-* \ - | orion-* \ - | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ - | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ -- | pru-* \ - | pyramid-* \ -- | riscv32-* | riscv64-* \ - | rl78-* | romp-* | rs6000-* | rx-* \ - | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ - | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ - | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* 
\ - | sparclite-* \ -- | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ -+ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \ - | tahoe-* \ - | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ - | tile*-* \ -@@ -442,8 +429,6 @@ - | ubicom32-* \ - | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ - | vax-* \ -- | visium-* \ -- | wasm32-* \ - | we32k-* \ - | x86-* | x86_64-* | xc16x-* | xps100-* \ - | xstormy16-* | xtensa*-* \ -@@ -520,9 +505,6 @@ - basic_machine=i386-pc - os=-aros - ;; -- asmjs) -- basic_machine=asmjs-unknown -- ;; - aux) - basic_machine=m68k-apple - os=-aux -@@ -816,7 +798,7 @@ - os=-mingw64 - ;; - mingw32) -- basic_machine=i686-pc -+ basic_machine=i386-pc - os=-mingw32 - ;; - mingw32ce) -@@ -844,10 +826,6 @@ - basic_machine=powerpc-unknown - os=-morphos - ;; -- moxiebox) -- basic_machine=moxie-unknown -- os=-moxiebox -- ;; - msdos) - basic_machine=i386-pc - os=-msdos -@@ -856,7 +834,7 @@ - basic_machine=`echo "$basic_machine" | sed -e 's/ms1-/mt-/'` - ;; - msys) -- basic_machine=i686-pc -+ basic_machine=i386-pc - os=-msys - ;; - mvs) -@@ -943,9 +921,6 @@ - nsv-tandem) - basic_machine=nsv-tandem - ;; -- nsx-tandem) -- basic_machine=nsx-tandem -- ;; - op50n-* | op60c-*) - basic_machine=hppa1.1-oki - os=-proelf -@@ -1030,7 +1005,7 @@ - ppc-* | ppcbe-*) - basic_machine=powerpc-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; -- ppcle | powerpclittle) -+ ppcle | powerpclittle | ppc-le | powerpc-little) - basic_machine=powerpcle-unknown - ;; - ppcle-* | powerpclittle-*) -@@ -1520,8 +1495,6 @@ - ;; - -nacl*) - ;; -- -ios) -- ;; - -none) - ;; - *) -@@ -1562,9 +1535,6 @@ - c4x-* | tic4x-*) - os=-coff - ;; -- c8051-*) -- os=-elf -- ;; - hexagon-*) - os=-elf - ;; -@@ -1608,6 +1578,9 @@ - mips*-*) - os=-elf - ;; -+ or1k-*) -+ os=-elf -+ ;; - or32-*) - os=-coff - ;; -@@ -1617,9 +1590,6 @@ - sparc-* | *-sun) - os=-sunos4.1.1 - ;; -- pru-*) -- os=-elf -- ;; - *-be) - os=-beos - ;; -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/depcomp psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/depcomp ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/depcomp 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/depcomp 2020-07-16 10:48:35.350075712 +0200 -@@ -786,6 +786,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/install-sh psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/install-sh ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/install-sh 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/install-sh 2020-07-16 10:48:35.352538233 +0200 -@@ -41,15 +41,19 @@ - # This script is compatible with the BSD install script, but was written - # from scratch. - --tab=' ' - nl=' - ' --IFS=" $tab$nl" -+IFS=" "" $nl" - --# Set DOITPROG to "echo" to test this script. -+# set DOITPROG to echo to test this script - -+# Don't use :- since 4.3BSD and earlier shells don't like it. - doit=${DOITPROG-} --doit_exec=${doit:-exec} -+if test -z "$doit"; then -+ doit_exec=exec -+else -+ doit_exec=$doit -+fi - - # Put in absolute file names if you don't have them in your path; - # or use environment vars. -@@ -64,6 +68,17 @@ - rmprog=${RMPROG-rm} - stripprog=${STRIPPROG-strip} - -+posix_glob='?' 
-+initialize_posix_glob=' -+ test "$posix_glob" != "?" || { -+ if (set -f) 2>/dev/null; then -+ posix_glob= -+ else -+ posix_glob=: -+ fi -+ } -+' -+ - posix_mkdir= - - # Desired mode of installed file. -@@ -82,7 +97,7 @@ - dst_arg= - - copy_on_change=false --is_target_a_directory=possibly -+no_target_directory= - - usage="\ - Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE -@@ -122,57 +137,46 @@ - -d) dir_arg=true;; - - -g) chgrpcmd="$chgrpprog $2" -- shift;; -+ shift;; - - --help) echo "$usage"; exit $?;; - - -m) mode=$2 -- case $mode in -- *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*) -- echo "$0: invalid mode: $mode" >&2 -- exit 1;; -- esac -- shift;; -+ case $mode in -+ *' '* | *' '* | *' -+'* | *'*'* | *'?'* | *'['*) -+ echo "$0: invalid mode: $mode" >&2 -+ exit 1;; -+ esac -+ shift;; - - -o) chowncmd="$chownprog $2" -- shift;; -+ shift;; - - -s) stripcmd=$stripprog;; - -- -t) -- is_target_a_directory=always -- dst_arg=$2 -- # Protect names problematic for 'test' and other utilities. -- case $dst_arg in -- -* | [=\(\)!]) dst_arg=./$dst_arg;; -- esac -- shift;; -+ -t) dst_arg=$2 -+ # Protect names problematic for 'test' and other utilities. -+ case $dst_arg in -+ -* | [=\(\)!]) dst_arg=./$dst_arg;; -+ esac -+ shift;; - -- -T) is_target_a_directory=never;; -+ -T) no_target_directory=true;; - - --version) echo "$0 $scriptversion"; exit $?;; - -- --) shift -- break;; -+ --) shift -+ break;; - -- -*) echo "$0: invalid option: $1" >&2 -- exit 1;; -+ -*) echo "$0: invalid option: $1" >&2 -+ exit 1;; - - *) break;; - esac - shift - done - --# We allow the use of options -d and -T together, by making -d --# take the precedence; this is for compatibility with GNU install. -- --if test -n "$dir_arg"; then -- if test -n "$dst_arg"; then -- echo "$0: target directory not allowed when installing a directory." >&2 -- exit 1 -- fi --fi -- - if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then - # When -d is used, all remaining arguments are directories to create. - # When -t is used, the destination is already specified. -@@ -204,15 +208,6 @@ - fi - - if test -z "$dir_arg"; then -- if test $# -gt 1 || test "$is_target_a_directory" = always; then -- if test ! -d "$dst_arg"; then -- echo "$0: $dst_arg: Is not a directory." >&2 -- exit 1 -- fi -- fi --fi -- --if test -z "$dir_arg"; then - do_exit='(exit $ret); exit $ret' - trap "ret=129; $do_exit" 1 - trap "ret=130; $do_exit" 2 -@@ -228,16 +223,16 @@ - - *[0-7]) - if test -z "$stripcmd"; then -- u_plus_rw= -+ u_plus_rw= - else -- u_plus_rw='% 200' -+ u_plus_rw='% 200' - fi - cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; - *) - if test -z "$stripcmd"; then -- u_plus_rw= -+ u_plus_rw= - else -- u_plus_rw=,u+rw -+ u_plus_rw=,u+rw - fi - cp_umask=$mode$u_plus_rw;; - esac -@@ -387,51 +382,53 @@ - # directory the slow way, step by step, checking for races as we go. - - case $dstdir in -- /*) prefix='/';; -- [-=\(\)!]*) prefix='./';; -- *) prefix='';; -+ /*) prefix='/';; -+ [-=\(\)!]*) prefix='./';; -+ *) prefix='';; - esac - -+ eval "$initialize_posix_glob" -+ - oIFS=$IFS - IFS=/ -- set -f -+ $posix_glob set -f - set fnord $dstdir - shift -- set +f -+ $posix_glob set +f - IFS=$oIFS - - prefixes= - - for d - do -- test X"$d" = X && continue -+ test X"$d" = X && continue - -- prefix=$prefix$d -- if test -d "$prefix"; then -- prefixes= -- else -- if $posix_mkdir; then -- (umask=$mkdir_umask && -- $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break -- # Don't fail if two instances are running concurrently. 
-- test -d "$prefix" || exit 1 -- else -- case $prefix in -- *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; -- *) qprefix=$prefix;; -- esac -- prefixes="$prefixes '$qprefix'" -- fi -- fi -- prefix=$prefix/ -+ prefix=$prefix$d -+ if test -d "$prefix"; then -+ prefixes= -+ else -+ if $posix_mkdir; then -+ (umask=$mkdir_umask && -+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break -+ # Don't fail if two instances are running concurrently. -+ test -d "$prefix" || exit 1 -+ else -+ case $prefix in -+ *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; -+ *) qprefix=$prefix;; -+ esac -+ prefixes="$prefixes '$qprefix'" -+ fi -+ fi -+ prefix=$prefix/ - done - - if test -n "$prefixes"; then -- # Don't fail if two instances are running concurrently. -- (umask $mkdir_umask && -- eval "\$doit_exec \$mkdirprog $prefixes") || -- test -d "$dstdir" || exit 1 -- obsolete_mkdir_used=true -+ # Don't fail if two instances are running concurrently. -+ (umask $mkdir_umask && -+ eval "\$doit_exec \$mkdirprog $prefixes") || -+ test -d "$dstdir" || exit 1 -+ obsolete_mkdir_used=true - fi - fi - fi -@@ -466,12 +463,15 @@ - - # If -C, don't bother to copy if it wouldn't change the file. - if $copy_on_change && -- old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && -- new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && -- set -f && -+ old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && -+ new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && -+ -+ eval "$initialize_posix_glob" && -+ $posix_glob set -f && - set X $old && old=:$2:$4:$5:$6 && - set X $new && new=:$2:$4:$5:$6 && -- set +f && -+ $posix_glob set +f && -+ - test "$old" = "$new" && - $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 - then -@@ -484,24 +484,24 @@ - # to itself, or perhaps because mv is so ancient that it does not - # support -f. - { -- # Now remove or move aside any old file at destination location. -- # We try this two ways since rm can't unlink itself on some -- # systems and the destination file might be busy for other -- # reasons. In this case, the final cleanup might fail but the new -- # file should still install successfully. -- { -- test ! -f "$dst" || -- $doit $rmcmd -f "$dst" 2>/dev/null || -- { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && -- { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } -- } || -- { echo "$0: cannot unlink or rename $dst" >&2 -- (exit 1); exit 1 -- } -- } && -+ # Now remove or move aside any old file at destination location. -+ # We try this two ways since rm can't unlink itself on some -+ # systems and the destination file might be busy for other -+ # reasons. In this case, the final cleanup might fail but the new -+ # file should still install successfully. -+ { -+ test ! -f "$dst" || -+ $doit $rmcmd -f "$dst" 2>/dev/null || -+ { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && -+ { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } -+ } || -+ { echo "$0: cannot unlink or rename $dst" >&2 -+ (exit 1); exit 1 -+ } -+ } && - -- # Now rename the file to the real destination. -- $doit $mvcmd "$dsttmp" "$dst" -+ # Now rename the file to the real destination. 
-+ $doit $mvcmd "$dsttmp" "$dst" - } - fi || exit 1 - -@@ -513,6 +513,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/libtool.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/libtool.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/libtool.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/libtool.m4 2020-07-16 10:48:35.364065380 +0200 -@@ -1,6 +1,8 @@ - # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- - # --# Copyright (C) 1996-2001, 2003-2015 Free Software Foundation, Inc. -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -+# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -+# Foundation, Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is free software; the Free Software Foundation gives -@@ -8,30 +10,36 @@ - # modifications, as long as this notice is preserved. - - m4_define([_LT_COPYING], [dnl --# Copyright (C) 2014 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# GNU Libtool is free software; you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation; either version 2 of of the License, or --# (at your option) any later version. -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -+# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -+# Foundation, Inc. -+# Written by Gordon Matzigkeit, 1996 -+# -+# This file is part of GNU Libtool. -+# -+# GNU Libtool is free software; you can redistribute it and/or -+# modify it under the terms of the GNU General Public License as -+# published by the Free Software Foundation; either version 2 of -+# the License, or (at your option) any later version. - # --# As a special exception to the GNU General Public License, if you --# distribute this file as part of a program or library that is built --# using GNU Libtool, you may include this file under the same --# distribution terms that you use for the rest of that program. -+# As a special exception to the GNU General Public License, -+# if you distribute this file as part of a program or library that -+# is built using GNU Libtool, you may include this file under the -+# same distribution terms that you use for the rest of that program. - # --# GNU Libtool is distributed in the hope that it will be useful, but --# WITHOUT ANY WARRANTY; without even the implied warranty of -+# GNU Libtool is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of - # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - # GNU General Public License for more details. - # - # You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -+# along with GNU Libtool; see the file COPYING. If not, a copy -+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or -+# obtained by writing to the Free Software Foundation, Inc., -+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
- ]) - --# serial 58 LT_INIT -+# serial 57 LT_INIT - - - # LT_PREREQ(VERSION) -@@ -59,7 +67,7 @@ - # LT_INIT([OPTIONS]) - # ------------------ - AC_DEFUN([LT_INIT], --[AC_PREREQ([2.62])dnl We use AC_PATH_PROGS_FEATURE_CHECK -+[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT - AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl - AC_BEFORE([$0], [LT_LANG])dnl - AC_BEFORE([$0], [LT_OUTPUT])dnl -@@ -83,7 +91,7 @@ - _LT_SET_OPTIONS([$0], [$1]) - - # This can be used to rebuild libtool when needed --LIBTOOL_DEPS=$ltmain -+LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. - LIBTOOL='$(SHELL) $(top_builddir)/libtool' -@@ -103,43 +111,26 @@ - dnl AC_DEFUN([AM_PROG_LIBTOOL], []) - - --# _LT_PREPARE_CC_BASENAME --# ----------------------- --m4_defun([_LT_PREPARE_CC_BASENAME], [ --# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. --func_cc_basename () --{ -- for cc_temp in @S|@*""; do -- case $cc_temp in -- compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; -- distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; -- \-*) ;; -- *) break;; -- esac -- done -- func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` --} --])# _LT_PREPARE_CC_BASENAME -- -- - # _LT_CC_BASENAME(CC) - # ------------------- --# It would be clearer to call AC_REQUIREs from _LT_PREPARE_CC_BASENAME, --# but that macro is also expanded into generated libtool script, which --# arranges for $SED and $ECHO to be set by different means. -+# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. - m4_defun([_LT_CC_BASENAME], --[m4_require([_LT_PREPARE_CC_BASENAME])dnl --AC_REQUIRE([_LT_DECL_SED])dnl --AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl --func_cc_basename $1 --cc_basename=$func_cc_basename_result -+[for cc_temp in $1""; do -+ case $cc_temp in -+ compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; -+ distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; -+ \-*) ;; -+ *) break;; -+ esac -+done -+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - ]) - - - # _LT_FILEUTILS_DEFAULTS - # ---------------------- - # It is okay to use these file commands and assume they have been set --# sensibly after 'm4_require([_LT_FILEUTILS_DEFAULTS])'. -+# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'. - m4_defun([_LT_FILEUTILS_DEFAULTS], - [: ${CP="cp -f"} - : ${MV="mv -f"} -@@ -186,16 +177,15 @@ - m4_require([_LT_CMD_OLD_ARCHIVE])dnl - m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl - m4_require([_LT_WITH_SYSROOT])dnl --m4_require([_LT_CMD_TRUNCATE])dnl - - _LT_CONFIG_LIBTOOL_INIT([ --# See if we are running on zsh, and set the options that allow our -+# See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes INIT. --if test -n "\${ZSH_VERSION+set}"; then -+if test -n "\${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - ]) --if test -n "${ZSH_VERSION+set}"; then -+if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -@@ -208,7 +198,7 @@ - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. 
-- if test set != "${COLLECT_NAMES+set}"; then -+ if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -219,14 +209,14 @@ - ofile=libtool - can_build_shared=yes - --# All known linkers require a '.a' archive for static linking (except MSVC, -+# All known linkers require a `.a' archive for static linking (except MSVC, - # which needs '.lib'). - libext=a - --with_gnu_ld=$lt_cv_prog_gnu_ld -+with_gnu_ld="$lt_cv_prog_gnu_ld" - --old_CC=$CC --old_CFLAGS=$CFLAGS -+old_CC="$CC" -+old_CFLAGS="$CFLAGS" - - # Set sane defaults for various variables - test -z "$CC" && CC=cc -@@ -279,14 +269,14 @@ - - # _LT_PROG_LTMAIN - # --------------- --# Note that this code is called both from 'configure', and 'config.status' -+# Note that this code is called both from `configure', and `config.status' - # now that we use AC_CONFIG_COMMANDS to generate libtool. Notably, --# 'config.status' has no value for ac_aux_dir unless we are using Automake, -+# `config.status' has no value for ac_aux_dir unless we are using Automake, - # so we pass a copy along to make sure it has a sensible value anyway. - m4_defun([_LT_PROG_LTMAIN], - [m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl - _LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir']) --ltmain=$ac_aux_dir/ltmain.sh -+ltmain="$ac_aux_dir/ltmain.sh" - ])# _LT_PROG_LTMAIN - - -@@ -296,7 +286,7 @@ - - # So that we can recreate a full libtool script including additional - # tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS --# in macros and then make a single call at the end using the 'libtool' -+# in macros and then make a single call at the end using the `libtool' - # label. - - -@@ -431,8 +421,8 @@ - - # _LT_CONFIG_STATUS_DECLARE([VARNAME]) - # ------------------------------------ --# Quote a variable value, and forward it to 'config.status' so that its --# declaration there will have the same value as in 'configure'. VARNAME -+# Quote a variable value, and forward it to `config.status' so that its -+# declaration there will have the same value as in `configure'. VARNAME - # must have a single quote delimited value for this to work. - m4_define([_LT_CONFIG_STATUS_DECLARE], - [$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`']) -@@ -456,7 +446,7 @@ - # Output comment and list of tags supported by the script - m4_defun([_LT_LIBTOOL_TAGS], - [_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl --available_tags='_LT_TAGS'dnl -+available_tags="_LT_TAGS"dnl - ]) - - -@@ -484,7 +474,7 @@ - # _LT_LIBTOOL_CONFIG_VARS - # ----------------------- - # Produce commented declarations of non-tagged libtool config variables --# suitable for insertion in the LIBTOOL CONFIG section of the 'libtool' -+# suitable for insertion in the LIBTOOL CONFIG section of the `libtool' - # script. Tagged libtool config variables (even for the LIBTOOL CONFIG - # section) are produced by _LT_LIBTOOL_TAG_VARS. - m4_defun([_LT_LIBTOOL_CONFIG_VARS], -@@ -510,8 +500,8 @@ - # Send accumulated output to $CONFIG_STATUS. Thanks to the lists of - # variables for single and double quote escaping we saved from calls - # to _LT_DECL, we can put quote escaped variables declarations --# into 'config.status', and then the shell code to quote escape them in --# for loops in 'config.status'. Finally, any additional code accumulated -+# into `config.status', and then the shell code to quote escape them in -+# for loops in `config.status'. 
Finally, any additional code accumulated - # from calls to _LT_CONFIG_LIBTOOL_INIT is expanded. - m4_defun([_LT_CONFIG_COMMANDS], - [AC_PROVIDE_IFELSE([LT_OUTPUT], -@@ -557,7 +547,7 @@ - ]], lt_decl_quote_varnames); do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[[\\\\\\\`\\"\\\$]]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -570,7 +560,7 @@ - ]], lt_decl_dquote_varnames); do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[[\\\\\\\`\\"\\\$]]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -586,7 +576,7 @@ - # Generate a child script FILE with all initialization necessary to - # reuse the environment learned by the parent script, and make the - # file executable. If COMMENT is supplied, it is inserted after the --# '#!' sequence but before initialization text begins. After this -+# `#!' sequence but before initialization text begins. After this - # macro, additional text can be appended to FILE to form the body of - # the child script. The macro ends with non-zero status if the - # file could not be fully written (such as if the disk is full). -@@ -608,7 +598,7 @@ - _AS_PREPARE - exec AS_MESSAGE_FD>&1 - _ASEOF --test 0 = "$lt_write_fail" && chmod +x $1[]dnl -+test $lt_write_fail = 0 && chmod +x $1[]dnl - m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT - - # LT_OUTPUT -@@ -631,7 +621,7 @@ - } >&AS_MESSAGE_LOG_FD - - lt_cl_help="\ --'$as_me' creates a local libtool stub from the current configuration, -+\`$as_me' creates a local libtool stub from the current configuration, - for use in further configure time tests before the real libtool is - generated. - -@@ -653,7 +643,7 @@ - This config.lt script is free software; the Free Software Foundation - gives unlimited permision to copy, distribute and modify it." - --while test 0 != $[#] -+while test $[#] != 0 - do - case $[1] in - --version | --v* | -V ) -@@ -666,10 +656,10 @@ - lt_cl_silent=: ;; - - -*) AC_MSG_ERROR([unrecognized option: $[1] --Try '$[0] --help' for more information.]) ;; -+Try \`$[0] --help' for more information.]) ;; - - *) AC_MSG_ERROR([unrecognized argument: $[1] --Try '$[0] --help' for more information.]) ;; -+Try \`$[0] --help' for more information.]) ;; - esac - shift - done -@@ -695,7 +685,7 @@ - # open by configure. Here we exec the FD to /dev/null, effectively closing - # config.log, so it can be properly (re)opened and appended to by config.lt. 
- lt_cl_success=: --test yes = "$silent" && -+test "$silent" = yes && - lt_config_lt_args="$lt_config_lt_args --quiet" - exec AS_MESSAGE_LOG_FD>/dev/null - $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false -@@ -715,30 +705,27 @@ - _LT_CONFIG_SAVE_COMMANDS([ - m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl - m4_if(_LT_TAG, [C], [ -- # See if we are running on zsh, and set the options that allow our -+ # See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes. -- if test -n "${ZSH_VERSION+set}"; then -+ if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -- cfgfile=${ofile}T -+ cfgfile="${ofile}T" - trap "$RM \"$cfgfile\"; exit 1" 1 2 15 - $RM "$cfgfile" - - cat <<_LT_EOF >> "$cfgfile" - #! $SHELL --# Generated automatically by $as_me ($PACKAGE) $VERSION --# NOTE: Changes made to this file will be lost: look at ltmain.sh. -- --# Provide generalized library-building support services. --# Written by Gordon Matzigkeit, 1996 - -+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. -+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION -+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: -+# NOTE: Changes made to this file will be lost: look at ltmain.sh. -+# - _LT_COPYING - _LT_LIBTOOL_TAGS - --# Configured defaults for sys_lib_dlsearch_path munging. --: \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} -- - # ### BEGIN LIBTOOL CONFIG - _LT_LIBTOOL_CONFIG_VARS - _LT_LIBTOOL_TAG_VARS -@@ -746,24 +733,13 @@ - - _LT_EOF - -- cat <<'_LT_EOF' >> "$cfgfile" -- --# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE -- --_LT_PREPARE_MUNGE_PATH_LIST --_LT_PREPARE_CC_BASENAME -- --# ### END FUNCTIONS SHARED WITH CONFIGURE -- --_LT_EOF -- - case $host_os in - aix3*) - cat <<\_LT_EOF >> "$cfgfile" - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. --if test set != "${COLLECT_NAMES+set}"; then -+if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -780,6 +756,8 @@ - sed '$q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - -+ _LT_PROG_REPLACE_SHELLFNS -+ - mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" -@@ -797,6 +775,7 @@ - [m4_if([$1], [], [ - PACKAGE='$PACKAGE' - VERSION='$VERSION' -+ TIMESTAMP='$TIMESTAMP' - RM='$RM' - ofile='$ofile'], []) - ])dnl /_LT_CONFIG_SAVE_COMMANDS -@@ -995,7 +974,7 @@ - - AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], - [lt_cv_apple_cc_single_mod=no -- if test -z "$LT_MULTI_MODULE"; then -+ if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the -@@ -1013,7 +992,7 @@ - cat conftest.err >&AS_MESSAGE_LOG_FD - # Otherwise, if the output was created with a 0 exit code from - # the compiler, it worked. 
-- elif test -f libconftest.dylib && test 0 = "$_lt_result"; then -+ elif test -f libconftest.dylib && test $_lt_result -eq 0; then - lt_cv_apple_cc_single_mod=yes - else - cat conftest.err >&AS_MESSAGE_LOG_FD -@@ -1031,7 +1010,7 @@ - AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], - [lt_cv_ld_exported_symbols_list=yes], - [lt_cv_ld_exported_symbols_list=no]) -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - ]) - - AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load], -@@ -1053,7 +1032,7 @@ - _lt_result=$? - if test -s conftest.err && $GREP force_load conftest.err; then - cat conftest.err >&AS_MESSAGE_LOG_FD -- elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then -+ elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then - lt_cv_ld_force_load=yes - else - cat conftest.err >&AS_MESSAGE_LOG_FD -@@ -1063,32 +1042,32 @@ - ]) - case $host_os in - rhapsody* | darwin1.[[012]]) -- _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) # darwin 5.x on - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? - case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -- 10.[[012]][[,.]]*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; -+ 10.[[012]]*) -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac -- if test yes = "$lt_cv_apple_cc_single_mod"; then -+ if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi -- if test yes = "$lt_cv_ld_exported_symbols_list"; then -- _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' -+ if test "$lt_cv_ld_exported_symbols_list" = "yes"; then -+ _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else -- _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' -+ _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' - fi -- if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then -+ if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then - _lt_dsymutil='~$DSYMUTIL $lib || :' - else - _lt_dsymutil= -@@ -1108,29 +1087,29 @@ - _LT_TAGVAR(hardcode_direct, $1)=no - _LT_TAGVAR(hardcode_automatic, $1)=yes - _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported -- if test yes = "$lt_cv_ld_force_load"; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' -+ if test "$lt_cv_ld_force_load" = "yes"; then -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all 
\"$new_convenience\"`' - m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes], - [FC], [_LT_TAGVAR(compiler_needs_object, $1)=yes]) - else - _LT_TAGVAR(whole_archive_flag_spec, $1)='' - fi - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(allow_undefined_flag, $1)=$_lt_dar_allow_undefined -+ _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined" - case $cc_basename in -- ifort*|nagfor*) _lt_dar_can_shared=yes ;; -+ ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac -- if test yes = "$_lt_dar_can_shared"; then -+ if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all -- _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" -- _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" -- _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" -- _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" -+ _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" -+ _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" -+ _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - m4_if([$1], [CXX], --[ if test yes != "$lt_cv_apple_cc_single_mod"; then -- _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil" -- _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil" -+[ if test "$lt_cv_apple_cc_single_mod" != "yes"; then -+ _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC 
-dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" - fi - ],[]) - else -@@ -1150,7 +1129,7 @@ - # Allow to override them for all tags through lt_cv_aix_libpath. - m4_defun([_LT_SYS_MODULE_PATH_AIX], - [m4_require([_LT_DECL_SED])dnl --if test set = "${lt_cv_aix_libpath+set}"; then -+if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], -@@ -1168,7 +1147,7 @@ - _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi],[]) - if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then -- _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=/usr/lib:/lib -+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib" - fi - ]) - aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) -@@ -1188,8 +1167,8 @@ - # ----------------------- - # Find how we can fake an echo command that does not interpret backslash. - # In particular, with Autoconf 2.60 or later we add some code to the start --# of the generated configure script that will find a shell with a builtin --# printf (that we can use as an echo command). -+# of the generated configure script which will find a shell with a builtin -+# printf (which we can use as an echo command). - m4_defun([_LT_PROG_ECHO_BACKSLASH], - [ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' - ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO -@@ -1217,10 +1196,10 @@ - # Invoke $ECHO with all args, space-separated. - func_echo_all () - { -- $ECHO "$*" -+ $ECHO "$*" - } - --case $ECHO in -+case "$ECHO" in - printf*) AC_MSG_RESULT([printf]) ;; - print*) AC_MSG_RESULT([print -r]) ;; - *) AC_MSG_RESULT([cat]) ;; -@@ -1246,17 +1225,16 @@ - AC_DEFUN([_LT_WITH_SYSROOT], - [AC_MSG_CHECKING([for sysroot]) - AC_ARG_WITH([sysroot], --[AS_HELP_STRING([--with-sysroot@<:@=DIR@:>@], -- [Search for dependent libraries within DIR (or the compiler's sysroot -- if not specified).])], -+[ --with-sysroot[=DIR] Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified).], - [], [with_sysroot=no]) - - dnl lt_sysroot will always be passed unquoted. We quote it here - dnl in case the user passed a directory name. - lt_sysroot= --case $with_sysroot in #( -+case ${with_sysroot} in #( - yes) -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_sysroot=`$CC --print-sysroot 2>/dev/null` - fi - ;; #( -@@ -1266,14 +1244,14 @@ - no|'') - ;; #( - *) -- AC_MSG_RESULT([$with_sysroot]) -+ AC_MSG_RESULT([${with_sysroot}]) - AC_MSG_ERROR([The sysroot must be an absolute path.]) - ;; - esac - - AC_MSG_RESULT([${lt_sysroot:-no}]) - _LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl --[dependent libraries, and where our libraries should be installed.])]) -+[dependent libraries, and in which our libraries should be installed.])]) - - # _LT_ENABLE_LOCK - # --------------- -@@ -1281,33 +1259,31 @@ - [AC_ARG_ENABLE([libtool-lock], - [AS_HELP_STRING([--disable-libtool-lock], - [avoid locking (might break parallel builds)])]) --test no = "$enable_libtool_lock" || enable_libtool_lock=yes -+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - - # Some flags need to be propagated to the compiler or linker for good - # libtool support. - case $host in - ia64-*-hpux*) -- # Find out what ABI is being produced by ac_compile, and set mode -- # options accordingly. 
-+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.$ac_objext` in - *ELF-32*) -- HPUX_IA64_MODE=32 -+ HPUX_IA64_MODE="32" - ;; - *ELF-64*) -- HPUX_IA64_MODE=64 -+ HPUX_IA64_MODE="64" - ;; - esac - fi - rm -rf conftest* - ;; - *-*-irix6*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -+ # Find out which ABI we are using. - echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -melf32bsmip" -@@ -1336,46 +1312,9 @@ - rm -rf conftest* - ;; - --mips64*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -- echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext -- if AC_TRY_EVAL(ac_compile); then -- emul=elf -- case `/usr/bin/file conftest.$ac_objext` in -- *32-bit*) -- emul="${emul}32" -- ;; -- *64-bit*) -- emul="${emul}64" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *MSB*) -- emul="${emul}btsmip" -- ;; -- *LSB*) -- emul="${emul}ltsmip" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *N32*) -- emul="${emul}n32" -- ;; -- esac -- LD="${LD-ld} -m $emul" -- fi -- rm -rf conftest* -- ;; -- --x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ -+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ - s390*-*linux*|s390*-*tpf*|sparc*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. Note that the listed cases only cover the -- # situations where additional linker options are needed (such as when -- # doing 32-bit compilation for a host where ld defaults to 64-bit, or -- # vice versa); the common cases where no linker options are needed do -- # not appear in the list. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.o` in -@@ -1385,19 +1324,9 @@ - LD="${LD-ld} -m elf_i386_fbsd" - ;; - x86_64-*linux*) -- case `/usr/bin/file conftest.o` in -- *x86-64*) -- LD="${LD-ld} -m elf32_x86_64" -- ;; -- *) -- LD="${LD-ld} -m elf_i386" -- ;; -- esac -- ;; -- powerpc64le-*linux*) -- LD="${LD-ld} -m elf32lppclinux" -+ LD="${LD-ld} -m elf_i386" - ;; -- powerpc64-*linux*) -+ ppc64-*linux*|powerpc64-*linux*) - LD="${LD-ld} -m elf32ppclinux" - ;; - s390x-*linux*) -@@ -1416,10 +1345,7 @@ - x86_64-*linux*) - LD="${LD-ld} -m elf_x86_64" - ;; -- powerpcle-*linux*) -- LD="${LD-ld} -m elf64lppc" -- ;; -- powerpc-*linux*) -+ ppc*-*linux*|powerpc*-*linux*) - LD="${LD-ld} -m elf64ppc" - ;; - s390*-*linux*|s390*-*tpf*) -@@ -1437,20 +1363,19 @@ - - *-*-sco3.2v5*) - # On SCO OpenServer 5, we need -belf to get full-featured binaries. -- SAVE_CFLAGS=$CFLAGS -+ SAVE_CFLAGS="$CFLAGS" - CFLAGS="$CFLAGS -belf" - AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, - [AC_LANG_PUSH(C) - AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) - AC_LANG_POP]) -- if test yes != "$lt_cv_cc_needs_belf"; then -+ if test x"$lt_cv_cc_needs_belf" != x"yes"; then - # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf -- CFLAGS=$SAVE_CFLAGS -+ CFLAGS="$SAVE_CFLAGS" - fi - ;; - *-*solaris*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. 
-+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.o` in -@@ -1458,7 +1383,7 @@ - case $lt_cv_prog_gnu_ld in - yes*) - case $host in -- i?86-*-solaris*|x86_64-*-solaris*) -+ i?86-*-solaris*) - LD="${LD-ld} -m elf_x86_64" - ;; - sparc*-*-solaris*) -@@ -1467,7 +1392,7 @@ - esac - # GNU ld 2.21 introduced _sol2 emulations. Use them if available. - if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then -- LD=${LD-ld}_sol2 -+ LD="${LD-ld}_sol2" - fi - ;; - *) -@@ -1483,7 +1408,7 @@ - ;; - esac - --need_locks=$enable_libtool_lock -+need_locks="$enable_libtool_lock" - ])# _LT_ENABLE_LOCK - - -@@ -1502,11 +1427,11 @@ - [echo conftest.$ac_objext > conftest.lst - lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' - AC_TRY_EVAL([lt_ar_try]) -- if test 0 -eq "$ac_status"; then -+ if test "$ac_status" -eq 0; then - # Ensure the archiver fails upon bogus file names. - rm -f conftest.$ac_objext libconftest.a - AC_TRY_EVAL([lt_ar_try]) -- if test 0 -ne "$ac_status"; then -+ if test "$ac_status" -ne 0; then - lt_cv_ar_at_file=@ - fi - fi -@@ -1514,7 +1439,7 @@ - ]) - ]) - --if test no = "$lt_cv_ar_at_file"; then -+if test "x$lt_cv_ar_at_file" = xno; then - archiver_list_spec= - else - archiver_list_spec=$lt_cv_ar_at_file -@@ -1545,7 +1470,7 @@ - - if test -n "$RANLIB"; then - case $host_os in -- bitrig* | openbsd*) -+ openbsd*) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" - ;; - *) -@@ -1581,7 +1506,7 @@ - [$2=no - m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="$3" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="$3" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins -@@ -1608,7 +1533,7 @@ - $RM conftest* - ]) - --if test yes = "[$]$2"; then -+if test x"[$]$2" = xyes; then - m4_if([$5], , :, [$5]) - else - m4_if([$6], , :, [$6]) -@@ -1630,7 +1555,7 @@ - m4_require([_LT_DECL_SED])dnl - AC_CACHE_CHECK([$1], [$2], - [$2=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $3" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -1649,10 +1574,10 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - ]) - --if test yes = "[$]$2"; then -+if test x"[$]$2" = xyes; then - m4_if([$4], , :, [$4]) - else - m4_if([$5], , :, [$5]) -@@ -1673,7 +1598,7 @@ - AC_MSG_CHECKING([the maximum length of command line arguments]) - AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl - i=0 -- teststring=ABCD -+ teststring="ABCD" - - case $build_os in - msdosdjgpp*) -@@ -1713,7 +1638,7 @@ - lt_cv_sys_max_cmd_len=8192; - ;; - -- bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) -+ netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) - # This has been around since 386BSD, at least. Likely further. 
- if test -x /sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` -@@ -1763,23 +1688,22 @@ - ;; - *) - lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` -- if test -n "$lt_cv_sys_max_cmd_len" && \ -- test undefined != "$lt_cv_sys_max_cmd_len"; then -+ if test -n "$lt_cv_sys_max_cmd_len"; then - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - else - # Make teststring a little bigger before we do anything with it. - # a 1K string should be a reasonable start. -- for i in 1 2 3 4 5 6 7 8; do -+ for i in 1 2 3 4 5 6 7 8 ; do - teststring=$teststring$teststring - done - SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} - # If test is not a shell built-in, we'll probably end up computing a - # maximum length that is only half of the actual maximum length, but - # we can't tell. -- while { test X`env echo "$teststring$teststring" 2>/dev/null` \ -+ while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ - = "X$teststring$teststring"; } >/dev/null 2>&1 && -- test 17 != "$i" # 1/2 MB should be enough -+ test $i != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - teststring=$teststring$teststring -@@ -1795,7 +1719,7 @@ - ;; - esac - ]) --if test -n "$lt_cv_sys_max_cmd_len"; then -+if test -n $lt_cv_sys_max_cmd_len ; then - AC_MSG_RESULT($lt_cv_sys_max_cmd_len) - else - AC_MSG_RESULT(none) -@@ -1823,7 +1747,7 @@ - # ---------------------------------------------------------------- - m4_defun([_LT_TRY_DLOPEN_SELF], - [m4_require([_LT_HEADER_DLFCN])dnl --if test yes = "$cross_compiling"; then : -+if test "$cross_compiling" = yes; then : - [$4] - else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 -@@ -1870,9 +1794,9 @@ - # endif - #endif - --/* When -fvisibility=hidden is used, assume the code has been annotated -+/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ --#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) - int fnord () __attribute__((visibility("default"))); - #endif - -@@ -1898,7 +1822,7 @@ - return status; - }] - _LT_EOF -- if AC_TRY_EVAL(ac_link) && test -s "conftest$ac_exeext" 2>/dev/null; then -+ if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null - lt_status=$? 
- case x$lt_status in -@@ -1919,7 +1843,7 @@ - # ------------------ - AC_DEFUN([LT_SYS_DLOPEN_SELF], - [m4_require([_LT_HEADER_DLFCN])dnl --if test yes != "$enable_dlopen"; then -+if test "x$enable_dlopen" != xyes; then - enable_dlopen=unknown - enable_dlopen_self=unknown - enable_dlopen_self_static=unknown -@@ -1929,52 +1853,44 @@ - - case $host_os in - beos*) -- lt_cv_dlopen=load_add_on -+ lt_cv_dlopen="load_add_on" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ;; - - mingw* | pw32* | cegcc*) -- lt_cv_dlopen=LoadLibrary -+ lt_cv_dlopen="LoadLibrary" - lt_cv_dlopen_libs= - ;; - - cygwin*) -- lt_cv_dlopen=dlopen -+ lt_cv_dlopen="dlopen" - lt_cv_dlopen_libs= - ;; - - darwin*) -- # if libdl is installed we need to link against it -+ # if libdl is installed we need to link against it - AC_CHECK_LIB([dl], [dlopen], -- [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],[ -- lt_cv_dlopen=dyld -+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[ -+ lt_cv_dlopen="dyld" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ]) - ;; - -- tpf*) -- # Don't try to run any link tests for TPF. We know it's impossible -- # because TPF is a cross-compiler, and we know how we open DSOs. -- lt_cv_dlopen=dlopen -- lt_cv_dlopen_libs= -- lt_cv_dlopen_self=no -- ;; -- - *) - AC_CHECK_FUNC([shl_load], -- [lt_cv_dlopen=shl_load], -+ [lt_cv_dlopen="shl_load"], - [AC_CHECK_LIB([dld], [shl_load], -- [lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld], -+ [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"], - [AC_CHECK_FUNC([dlopen], -- [lt_cv_dlopen=dlopen], -+ [lt_cv_dlopen="dlopen"], - [AC_CHECK_LIB([dl], [dlopen], -- [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl], -+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"], - [AC_CHECK_LIB([svld], [dlopen], -- [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld], -+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"], - [AC_CHECK_LIB([dld], [dld_link], -- [lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld]) -+ [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"]) - ]) - ]) - ]) -@@ -1983,21 +1899,21 @@ - ;; - esac - -- if test no = "$lt_cv_dlopen"; then -- enable_dlopen=no -- else -+ if test "x$lt_cv_dlopen" != xno; then - enable_dlopen=yes -+ else -+ enable_dlopen=no - fi - - case $lt_cv_dlopen in - dlopen) -- save_CPPFLAGS=$CPPFLAGS -- test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" -+ save_CPPFLAGS="$CPPFLAGS" -+ test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" - -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" - -- save_LIBS=$LIBS -+ save_LIBS="$LIBS" - LIBS="$lt_cv_dlopen_libs $LIBS" - - AC_CACHE_CHECK([whether a program can dlopen itself], -@@ -2007,7 +1923,7 @@ - lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) - ]) - -- if test yes = "$lt_cv_dlopen_self"; then -+ if test "x$lt_cv_dlopen_self" = xyes; then - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - AC_CACHE_CHECK([whether a statically linked program can dlopen itself], - lt_cv_dlopen_self_static, [dnl -@@ -2017,9 +1933,9 @@ - ]) - fi - -- CPPFLAGS=$save_CPPFLAGS -- LDFLAGS=$save_LDFLAGS -- LIBS=$save_LIBS -+ CPPFLAGS="$save_CPPFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+ LIBS="$save_LIBS" - ;; - esac - -@@ -2111,8 +2027,8 @@ - m4_require([_LT_FILEUTILS_DEFAULTS])dnl - _LT_COMPILER_C_O([$1]) - --hard_links=nottested --if test no = "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" && test no != "$need_locks"; then -+hard_links="nottested" -+if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = 
no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - AC_MSG_CHECKING([if we can lock with hard links]) - hard_links=yes -@@ -2122,8 +2038,8 @@ - ln conftest.a conftest.b 2>&5 || hard_links=no - ln conftest.a conftest.b 2>/dev/null && hard_links=no - AC_MSG_RESULT([$hard_links]) -- if test no = "$hard_links"; then -- AC_MSG_WARN(['$CC' does not support '-c -o', so 'make -j' may be unsafe]) -+ if test "$hard_links" = no; then -+ AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe]) - need_locks=warn - fi - else -@@ -2150,8 +2066,8 @@ - _LT_DECL([], [objdir], [0], - [The name of the directory that contains temporary libtool files])dnl - m4_pattern_allow([LT_OBJDIR])dnl --AC_DEFINE_UNQUOTED([LT_OBJDIR], "$lt_cv_objdir/", -- [Define to the sub-directory where libtool stores uninstalled libraries.]) -+AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/", -+ [Define to the sub-directory in which libtool stores uninstalled libraries.]) - ])# _LT_CHECK_OBJDIR - - -@@ -2163,15 +2079,15 @@ - _LT_TAGVAR(hardcode_action, $1)= - if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" || - test -n "$_LT_TAGVAR(runpath_var, $1)" || -- test yes = "$_LT_TAGVAR(hardcode_automatic, $1)"; then -+ test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then - - # We can hardcode non-existent directories. -- if test no != "$_LT_TAGVAR(hardcode_direct, $1)" && -+ if test "$_LT_TAGVAR(hardcode_direct, $1)" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one -- ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" && -- test no != "$_LT_TAGVAR(hardcode_minus_L, $1)"; then -+ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no && -+ test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then - # Linking always hardcodes the temporary library directory. - _LT_TAGVAR(hardcode_action, $1)=relink - else -@@ -2185,12 +2101,12 @@ - fi - AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)]) - --if test relink = "$_LT_TAGVAR(hardcode_action, $1)" || -- test yes = "$_LT_TAGVAR(inherit_rpath, $1)"; then -+if test "$_LT_TAGVAR(hardcode_action, $1)" = relink || -+ test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then - # Fast installation is not supported - enable_fast_install=no --elif test yes = "$shlibpath_overrides_runpath" || -- test no = "$enable_shared"; then -+elif test "$shlibpath_overrides_runpath" = yes || -+ test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless - fi -@@ -2214,7 +2130,7 @@ - # FIXME - insert some real tests, host_os isn't really good enough - case $host_os in - darwin*) -- if test -n "$STRIP"; then -+ if test -n "$STRIP" ; then - striplib="$STRIP -x" - old_striplib="$STRIP -S" - AC_MSG_RESULT([yes]) -@@ -2232,47 +2148,6 @@ - ])# _LT_CMD_STRIPLIB - - --# _LT_PREPARE_MUNGE_PATH_LIST --# --------------------------- --# Make sure func_munge_path_list() is defined correctly. 
--m4_defun([_LT_PREPARE_MUNGE_PATH_LIST], --[[# func_munge_path_list VARIABLE PATH --# ----------------------------------- --# VARIABLE is name of variable containing _space_ separated list of --# directories to be munged by the contents of PATH, which is string --# having a format: --# "DIR[:DIR]:" --# string "DIR[ DIR]" will be prepended to VARIABLE --# ":DIR[:DIR]" --# string "DIR[ DIR]" will be appended to VARIABLE --# "DIRP[:DIRP]::[DIRA:]DIRA" --# string "DIRP[ DIRP]" will be prepended to VARIABLE and string --# "DIRA[ DIRA]" will be appended to VARIABLE --# "DIR[:DIR]" --# VARIABLE will be replaced by "DIR[ DIR]" --func_munge_path_list () --{ -- case x@S|@2 in -- x) -- ;; -- *:) -- eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'` \@S|@@S|@1\" -- ;; -- x:*) -- eval @S|@1=\"\@S|@@S|@1 `$ECHO @S|@2 | $SED 's/:/ /g'`\" -- ;; -- *::*) -- eval @S|@1=\"\@S|@@S|@1\ `$ECHO @S|@2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" -- eval @S|@1=\"`$ECHO @S|@2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \@S|@@S|@1\" -- ;; -- *) -- eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'`\" -- ;; -- esac --} --]])# _LT_PREPARE_PATH_LIST -- -- - # _LT_SYS_DYNAMIC_LINKER([TAG]) - # ----------------------------- - # PORTME Fill in your ld.so characteristics -@@ -2283,18 +2158,17 @@ - m4_require([_LT_DECL_OBJDUMP])dnl - m4_require([_LT_DECL_SED])dnl - m4_require([_LT_CHECK_SHELL_FEATURES])dnl --m4_require([_LT_PREPARE_MUNGE_PATH_LIST])dnl - AC_MSG_CHECKING([dynamic linker characteristics]) - m4_if([$1], - [], [ --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $host_os in -- darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; -- *) lt_awk_arg='/^libraries:/' ;; -+ darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; -+ *) lt_awk_arg="/^libraries:/" ;; - esac - case $host_os in -- mingw* | cegcc*) lt_sed_strip_eq='s|=\([[A-Za-z]]:\)|\1|g' ;; -- *) lt_sed_strip_eq='s|=/|/|g' ;; -+ mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;; -+ *) lt_sed_strip_eq="s,=/,/,g" ;; - esac - lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` - case $lt_search_path_spec in -@@ -2310,35 +2184,28 @@ - ;; - esac - # Ok, now we have the path, separated by spaces, we can step through it -- # and add multilib dir if necessary... -+ # and add multilib dir if necessary. - lt_tmp_lt_search_path_spec= -- lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` -- # ...but if some path component already ends with the multilib dir we assume -- # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). 
-- case "$lt_multi_os_dir; $lt_search_path_spec " in -- "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) -- lt_multi_os_dir= -- ;; -- esac -+ lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` - for lt_sys_path in $lt_search_path_spec; do -- if test -d "$lt_sys_path$lt_multi_os_dir"; then -- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" -- elif test -n "$lt_multi_os_dir"; then -+ if test -d "$lt_sys_path/$lt_multi_os_dir"; then -+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" -+ else - test -d "$lt_sys_path" && \ - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" - fi - done - lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' --BEGIN {RS = " "; FS = "/|\n";} { -- lt_foo = ""; -- lt_count = 0; -+BEGIN {RS=" "; FS="/|\n";} { -+ lt_foo=""; -+ lt_count=0; - for (lt_i = NF; lt_i > 0; lt_i--) { - if ($lt_i != "" && $lt_i != ".") { - if ($lt_i == "..") { - lt_count++; - } else { - if (lt_count == 0) { -- lt_foo = "/" $lt_i lt_foo; -+ lt_foo="/" $lt_i lt_foo; - } else { - lt_count--; - } -@@ -2352,7 +2219,7 @@ - # for these hosts. - case $host_os in - mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ -- $SED 's|/\([[A-Za-z]]:\)|\1|g'` ;; -+ $SED 's,/\([[A-Za-z]]:\),\1,g'` ;; - esac - sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` - else -@@ -2361,7 +2228,7 @@ - library_names_spec= - libname_spec='lib$name' - soname_spec= --shrext_cmds=.so -+shrext_cmds=".so" - postinstall_cmds= - postuninstall_cmds= - finish_cmds= -@@ -2378,17 +2245,14 @@ - # flags to be left without arguments - need_version=unknown - --AC_ARG_VAR([LT_SYS_LIBRARY_PATH], --[User-defined run-time library search path.]) -- - case $host_os in - aix3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname.a' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. -- soname_spec='$libname$release$shared_ext$major' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - - aix[[4-9]]*) -@@ -2396,91 +2260,41 @@ - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 -- library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with -- # the line '#! .'. This would cause the generated library to -- # depend on '.', always an invalid library. This was fixed in -+ # the line `#! .'. This would cause the generated library to -+ # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. 
- case $host_os in - aix4 | aix4.[[01]] | aix4.[[01]].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' -- echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then -+ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac -- # Using Import Files as archive members, it is possible to support -- # filename-based versioning of shared library archives on AIX. While -- # this would work for both with and without runtime linking, it will -- # prevent static linking of such archives. So we do filename-based -- # shared library versioning with .so extension only, which is used -- # when both runtime linking and shared linking is enabled. -- # Unfortunately, runtime linking may impact performance, so we do -- # not want this to be the default eventually. Also, we use the -- # versioned .so libs for executables only if there is the -brtl -- # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. -- # To allow for filename-based versioning support, we need to create -- # libNAME.so.V as an archive file, containing: -- # *) an Import File, referring to the versioned filename of the -- # archive as well as the shared archive member, telling the -- # bitwidth (32 or 64) of that shared object, and providing the -- # list of exported symbols of that shared object, eventually -- # decorated with the 'weak' keyword -- # *) the shared object with the F_LOADONLY flag set, to really avoid -- # it being seen by the linker. -- # At run time we better use the real file rather than another symlink, -- # but for link time we create the symlink libNAME.so -> libNAME.so.V -- -- case $with_aix_soname,$aix_use_runtimelinking in -- # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct -+ # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. -- aix,yes) # traditional libtool -- dynamic_linker='AIX unversionable lib.so' -+ if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib<name>.so - # instead of lib<name>.a to let people know that these are not - # typical AIX shared libraries. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- ;; -- aix,no) # traditional AIX only -- dynamic_linker='AIX lib.a[(]lib.so.V[)]' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- ;; -- svr4,*) # full svr4 only -- dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)]" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # We do not specify a path in Import Files, so LIBPATH fires. 
-- shlibpath_overrides_runpath=yes -- ;; -- *,yes) # both, prefer svr4 -- dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)], lib.a[(]lib.so.V[)]" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # unpreferred sharedlib libNAME.a needs extra handling -- postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' -- postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,no) # both, prefer aix -- dynamic_linker="AIX lib.a[(]lib.so.V[)], lib.so.V[(]$shared_archive_member_spec.o[)]" -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling -- postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' -- postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' -- ;; -- esac -+ library_names_spec='${libname}${release}.a $libname.a' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ fi - shlibpath_var=LIBPATH - fi - ;; -@@ -2490,18 +2304,18 @@ - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
-- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' -+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - - beos*) -- library_names_spec='$libname$shared_ext' -+ library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; -@@ -2509,8 +2323,8 @@ - bsdi[[45]]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" -@@ -2522,7 +2336,7 @@ - - cygwin* | mingw* | pw32* | cegcc*) - version_type=windows -- shrext_cmds=.dll -+ shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - -@@ -2531,8 +2345,8 @@ - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ -@@ -2548,17 +2362,17 @@ - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' -- soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -+ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - m4_if([$1], [],[ - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"]) - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix -- soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' -- library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' -@@ -2567,8 +2381,8 @@ - *,cl*) - # Native MSVC - libname_spec='$name' -- soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -- library_names_spec='$libname.dll.lib' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) -@@ -2595,7 +2409,7 @@ - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) -- sys_lib_search_path_spec=$LIB -+ sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -@@ -2608,8 +2422,8 @@ - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' -@@ -2622,7 +2436,7 @@ - - *) - # Assume MSVC wrapper -- library_names_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext $libname.lib' -+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac -@@ -2635,8 +2449,8 @@ - version_type=darwin - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$major$shared_ext' -+ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' -+ soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' -@@ -2649,8 +2463,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -2668,13 +2482,12 @@ - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac -@@ -2699,15 +2512,26 @@ - esac - ;; - -+gnu*) -+ version_type=linux # correct to gnu/linux during the next big refactor -+ need_lib_prefix=no -+ need_version=no -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ shlibpath_var=LD_LIBRARY_PATH -+ shlibpath_overrides_runpath=no -+ hardcode_into_libs=yes -+ ;; -+ - haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH -- shlibpath_overrides_runpath=no -+ shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; -@@ -2725,15 +2549,14 @@ - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -- if test 32 = "$HPUX_IA64_MODE"; then -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux32 - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux64 - fi -+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' -@@ -2741,8 +2564,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; -@@ -2751,8 +2574,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
-@@ -2765,8 +2588,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -2777,7 +2600,7 @@ - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix -@@ -2785,8 +2608,8 @@ - esac - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= -@@ -2805,8 +2628,8 @@ - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no -- sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" -- sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" -+ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" -+ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -@@ -2815,33 +2638,13 @@ - dynamic_linker=no - ;; - --linux*android*) -- version_type=none # Android doesn't support versioned libraries. -- need_lib_prefix=no -- need_version=no -- library_names_spec='$libname$release$shared_ext' -- soname_spec='$libname$release$shared_ext' -- finish_cmds= -- shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -- -- # This implies no fast_install, which is unacceptable. -- # Some rework will be needed to allow for fast_install -- # before this can be enabled. -- hardcode_into_libs=yes -- -- dynamic_linker='Android linker' -- # Don't embed -rpath directories since the linker doesn't support them. -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- ;; -- - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -2866,15 +2669,14 @@ - # before this can be enabled. - hardcode_into_libs=yes - -- # Ideally, we could use ldconfig to report *all* directores which are -- # searched for libraries, however this is still not possible. 
Aside from not -- # being certain /sbin/ldconfig is available, command -- # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, -- # even though it is searched at run-time. Try to do the best guess by -- # appending ld.so.conf contents (and includes) to the search path. -+ # Add ABI-specific directories to the system library path. -+ sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" -+ -+ # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` -- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" -+ sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" -+ - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on -@@ -2891,12 +2693,12 @@ - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH -@@ -2906,7 +2708,7 @@ - - newsos6) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; -@@ -2915,68 +2717,58 @@ - version_type=qnx - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - --openbsd* | bitrig*) -+openbsd*) - version_type=sunos -- sys_lib_dlsearch_path_spec=/usr/lib -+ sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -- need_version=no -- else -- need_version=yes -- fi -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
-+ case $host_os in -+ openbsd3.3 | openbsd3.3.*) need_version=yes ;; -+ *) need_version=no ;; -+ esac -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ case $host_os in -+ openbsd2.[[89]] | openbsd2.[[89]].*) -+ shlibpath_overrides_runpath=no -+ ;; -+ *) -+ shlibpath_overrides_runpath=yes -+ ;; -+ esac -+ else -+ shlibpath_overrides_runpath=yes -+ fi - ;; - - os2*) - libname_spec='$name' -- version_type=windows -- shrext_cmds=.dll -- need_version=no -+ shrext_cmds=".dll" - need_lib_prefix=no -- # OS/2 can only load a DLL with a base name of 8 characters or less. -- soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; -- v=$($ECHO $release$versuffix | tr -d .-); -- n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); -- $ECHO $n$v`$shared_ext' -- library_names_spec='${libname}_dll.$libext' -+ library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' -- shlibpath_var=BEGINLIBPATH -- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ -- dldir=$destdir/`dirname \$dlpath`~ -- test -d \$dldir || mkdir -p \$dldir~ -- $install_prog $dir/$dlname \$dldir/$dlname~ -- chmod a+x \$dldir/$dlname~ -- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then -- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; -- fi' -- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ -- dlpath=$dir/\$dldll~ -- $RM \$dlpath' -+ shlibpath_var=LIBPATH - ;; - - osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -+ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - - rdos*) -@@ -2987,8 +2779,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -@@ -2998,11 +2790,11 @@ - - sunos4*) - version_type=sunos -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes -@@ -3010,8 +2802,8 @@ - - sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) -@@ -3032,24 +2824,24 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' -- soname_spec='$libname$shared_ext.$major' -+ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' -+ soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) -- version_type=sco -+ version_type=freebsd-elf - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -- if test yes = 
"$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' -@@ -3067,7 +2859,7 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes -@@ -3075,8 +2867,8 @@ - - uts4*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -3085,30 +2877,20 @@ - ;; - esac - AC_MSG_RESULT([$dynamic_linker]) --test no = "$dynamic_linker" && can_build_shared=no -+test "$dynamic_linker" = no && can_build_shared=no - - variables_saved_for_relink="PATH $shlibpath_var $runpath_var" --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" - fi - --if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then -- sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec -+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then -+ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" - fi -- --if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then -- sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec -+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then -+ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" - fi - --# remember unaugmented sys_lib_dlsearch_path content for libtool script decls... --configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec -- --# ... 
but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code --func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" -- --# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool --configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH -- - _LT_DECL([], [variables_saved_for_relink], [1], - [Variables whose values should be saved in libtool wrapper scripts and - restored at link time]) -@@ -3141,41 +2923,39 @@ - [Whether we should hardcode library paths into libraries]) - _LT_DECL([], [sys_lib_search_path_spec], [2], - [Compile-time system search path for libraries]) --_LT_DECL([sys_lib_dlsearch_path_spec], [configure_time_dlsearch_path], [2], -- [Detected run-time system search path for libraries]) --_LT_DECL([], [configure_time_lt_sys_library_path], [2], -- [Explicit LT_SYS_LIBRARY_PATH set during ./configure time]) -+_LT_DECL([], [sys_lib_dlsearch_path_spec], [2], -+ [Run-time system search path for libraries]) - ])# _LT_SYS_DYNAMIC_LINKER - - - # _LT_PATH_TOOL_PREFIX(TOOL) - # -------------------------- --# find a file program that can recognize shared library -+# find a file program which can recognize shared library - AC_DEFUN([_LT_PATH_TOOL_PREFIX], - [m4_require([_LT_DECL_EGREP])dnl - AC_MSG_CHECKING([for $1]) - AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, - [case $MAGIC_CMD in - [[\\/*] | ?:[\\/]*]) -- lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. -+ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; - *) -- lt_save_MAGIC_CMD=$MAGIC_CMD -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_MAGIC_CMD="$MAGIC_CMD" -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - dnl $ac_dummy forces splitting on constant user-supplied paths. - dnl POSIX.2 word splitting is done only on the output of word expansions, - dnl not every word. This closes a longstanding sh security hole. - ac_dummy="m4_if([$2], , $PATH, [$2])" - for ac_dir in $ac_dummy; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- if test -f "$ac_dir/$1"; then -- lt_cv_path_MAGIC_CMD=$ac_dir/"$1" -+ if test -f $ac_dir/$1; then -+ lt_cv_path_MAGIC_CMD="$ac_dir/$1" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` -- MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : -@@ -3198,11 +2978,11 @@ - break - fi - done -- IFS=$lt_save_ifs -- MAGIC_CMD=$lt_save_MAGIC_CMD -+ IFS="$lt_save_ifs" -+ MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; - esac]) --MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if test -n "$MAGIC_CMD"; then - AC_MSG_RESULT($MAGIC_CMD) - else -@@ -3220,7 +3000,7 @@ - - # _LT_PATH_MAGIC - # -------------- --# find a file program that can recognize a shared library -+# find a file program which can recognize a shared library - m4_defun([_LT_PATH_MAGIC], - [_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) - if test -z "$lt_cv_path_MAGIC_CMD"; then -@@ -3247,16 +3027,16 @@ - AC_ARG_WITH([gnu-ld], - [AS_HELP_STRING([--with-gnu-ld], - [assume the C compiler uses GNU ld @<:@default=no@:>@])], -- [test no = "$withval" || with_gnu_ld=yes], -+ [test "$withval" = no || with_gnu_ld=yes], - [with_gnu_ld=no])dnl - - ac_prog=ld --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - # Check if gcc -print-prog-name=ld gives a path. 
- AC_MSG_CHECKING([for ld used by $CC]) - case $host in - *-*-mingw*) -- # gcc leaves a trailing carriage return, which upsets mingw -+ # gcc leaves a trailing carriage return which upsets mingw - ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; - *) - ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; -@@ -3270,7 +3050,7 @@ - while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do - ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` - done -- test -z "$LD" && LD=$ac_prog -+ test -z "$LD" && LD="$ac_prog" - ;; - "") - # If it fails, then pretend we aren't using GCC. -@@ -3281,37 +3061,37 @@ - with_gnu_ld=unknown - ;; - esac --elif test yes = "$with_gnu_ld"; then -+elif test "$with_gnu_ld" = yes; then - AC_MSG_CHECKING([for GNU ld]) - else - AC_MSG_CHECKING([for non-GNU ld]) - fi - AC_CACHE_VAL(lt_cv_path_LD, - [if test -z "$LD"; then -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then -- lt_cv_path_LD=$ac_dir/$ac_prog -+ lt_cv_path_LD="$ac_dir/$ac_prog" - # Check to see if the program is GNU ld. I'd rather use --version, - # but apparently some variants of GNU ld only accept -v. - # Break only if it was the GNU/non-GNU ld that we prefer. - case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in - *GNU* | *'with BFD'*) -- test no != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != no && break - ;; - *) -- test yes != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != yes && break - ;; - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - else -- lt_cv_path_LD=$LD # Let the user override the test with a path. -+ lt_cv_path_LD="$LD" # Let the user override the test with a path. 
- fi]) --LD=$lt_cv_path_LD -+LD="$lt_cv_path_LD" - if test -n "$LD"; then - AC_MSG_RESULT($LD) - else -@@ -3365,13 +3145,13 @@ - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - reload_cmds=false - fi - ;; - darwin*) -- if test yes = "$GCC"; then -- reload_cmds='$LTCC $LTCFLAGS -nostdlib $wl-r -o $output$reload_objs' -+ if test "$GCC" = yes; then -+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' - else - reload_cmds='$LD$reload_flag -o $output$reload_objs' - fi -@@ -3382,43 +3162,6 @@ - ])# _LT_CMD_RELOAD - - --# _LT_PATH_DD --# ----------- --# find a working dd --m4_defun([_LT_PATH_DD], --[AC_CACHE_CHECK([for a working dd], [ac_cv_path_lt_DD], --[printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --: ${lt_DD:=$DD} --AC_PATH_PROGS_FEATURE_CHECK([lt_DD], [dd], --[if "$ac_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: --fi]) --rm -f conftest.i conftest2.i conftest.out]) --])# _LT_PATH_DD -- -- --# _LT_CMD_TRUNCATE --# ---------------- --# find command to truncate a binary pipe --m4_defun([_LT_CMD_TRUNCATE], --[m4_require([_LT_PATH_DD]) --AC_CACHE_CHECK([how to truncate binary pipes], [lt_cv_truncate_bin], --[printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --lt_cv_truncate_bin= --if "$ac_cv_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" --fi --rm -f conftest.i conftest2.i conftest.out --test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q"]) --_LT_DECL([lt_truncate_bin], [lt_cv_truncate_bin], [1], -- [Command to truncate a binary pipe]) --])# _LT_CMD_TRUNCATE -- -- - # _LT_CHECK_MAGIC_METHOD - # ---------------------- - # how to check for library dependencies -@@ -3434,13 +3177,13 @@ - # Need to set the preceding variable on all platforms that support - # interlibrary dependencies. - # 'none' -- dependencies not supported. --# 'unknown' -- same as none, but documents that we really don't know. -+# `unknown' -- same as none, but documents that we really don't know. - # 'pass_all' -- all dependencies passed with no checks. - # 'test_compile' -- check by making test program. - # 'file_magic [[regex]]' -- check by looking for files in library path --# that responds to the $file_magic_cmd with a given extended regex. --# If you have 'file' or equivalent on your system and you're not sure --# whether 'pass_all' will *always* work, you probably want this one. -+# which responds to the $file_magic_cmd with a given extended regex. -+# If you have `file' or equivalent on your system and you're not sure -+# whether `pass_all' will *always* work, you probably want this one. - - case $host_os in - aix[[4-9]]*) -@@ -3467,7 +3210,8 @@ - # Base MSYS/MinGW do not provide the 'file' command needed by - # func_win32_libid shell function, so use a weaker test based on 'objdump', - # unless we find 'file', for example because we are cross-compiling. -- if ( file / ) >/dev/null 2>&1; then -+ # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. 
-+ if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -@@ -3503,6 +3247,10 @@ - fi - ;; - -+gnu*) -+ lt_cv_deplibs_check_method=pass_all -+ ;; -+ - haiku*) - lt_cv_deplibs_check_method=pass_all - ;; -@@ -3541,7 +3289,7 @@ - ;; - - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -@@ -3563,8 +3311,8 @@ - lt_cv_deplibs_check_method=pass_all - ;; - --openbsd* | bitrig*) -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+openbsd*) -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' -@@ -3617,9 +3365,6 @@ - tpf*) - lt_cv_deplibs_check_method=pass_all - ;; --os2*) -- lt_cv_deplibs_check_method=pass_all -- ;; - esac - ]) - -@@ -3660,38 +3405,33 @@ - AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM, - [if test -n "$NM"; then - # Let the user override the test. -- lt_cv_path_NM=$NM -+ lt_cv_path_NM="$NM" - else -- lt_nm_to_check=${ac_tool_prefix}nm -+ lt_nm_to_check="${ac_tool_prefix}nm" - if test -n "$ac_tool_prefix" && test "$build" = "$host"; then - lt_nm_to_check="$lt_nm_to_check nm" - fi - for lt_tmp_nm in $lt_nm_to_check; do -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- tmp_nm=$ac_dir/$lt_tmp_nm -- if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then -+ tmp_nm="$ac_dir/$lt_tmp_nm" -+ if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then - # Check to see if the nm accepts a BSD-compat flag. -- # Adding the 'sed 1q' prevents false positives on HP-UX, which says: -+ # Adding the `sed 1q' prevents false positives on HP-UX, which says: - # nm: unknown option "B" ignored - # Tru64's nm complains that /dev/null is an invalid object file -- # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty -- case $build_os in -- mingw*) lt_bad_file=conftest.nm/nofile ;; -- *) lt_bad_file=/dev/null ;; -- esac -- case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in -- *$lt_bad_file* | *'Invalid file or object type'*) -+ case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in -+ */dev/null* | *'Invalid file or object type'*) - lt_cv_path_NM="$tmp_nm -B" -- break 2 -+ break - ;; - *) - case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in - */dev/null*) - lt_cv_path_NM="$tmp_nm -p" -- break 2 -+ break - ;; - *) - lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but -@@ -3702,21 +3442,21 @@ - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - done - : ${lt_cv_path_NM=no} - fi]) --if test no != "$lt_cv_path_NM"; then -- NM=$lt_cv_path_NM -+if test "$lt_cv_path_NM" != "no"; then -+ NM="$lt_cv_path_NM" - else - # Didn't find any BSD compatible name lister, look for dumpbin. - if test -n "$DUMPBIN"; then : - # Let the user override the test. 
- else - AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :) -- case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in -+ case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in - *COFF*) -- DUMPBIN="$DUMPBIN -symbols -headers" -+ DUMPBIN="$DUMPBIN -symbols" - ;; - *) - DUMPBIN=: -@@ -3724,8 +3464,8 @@ - esac - fi - AC_SUBST([DUMPBIN]) -- if test : != "$DUMPBIN"; then -- NM=$DUMPBIN -+ if test "$DUMPBIN" != ":"; then -+ NM="$DUMPBIN" - fi - fi - test -z "$NM" && NM=nm -@@ -3771,8 +3511,8 @@ - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- # two different shell functions defined in ltmain.sh; -- # decide which one to use based on capabilities of $DLLTOOL -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL - case `$DLLTOOL --help 2>&1` in - *--identify-strict*) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -@@ -3784,7 +3524,7 @@ - ;; - *) - # fallback: assume linklib IS sharedlib -- lt_cv_sharedlib_from_linklib_cmd=$ECHO -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" - ;; - esac - ]) -@@ -3811,28 +3551,13 @@ - lt_cv_path_mainfest_tool=yes - fi - rm -f conftest*]) --if test yes != "$lt_cv_path_mainfest_tool"; then -+if test "x$lt_cv_path_mainfest_tool" != xyes; then - MANIFEST_TOOL=: - fi - _LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl - ])# _LT_PATH_MANIFEST_TOOL - - --# _LT_DLL_DEF_P([FILE]) --# --------------------- --# True iff FILE is a Windows DLL '.def' file. --# Keep in sync with func_dll_def_p in the libtool script --AC_DEFUN([_LT_DLL_DEF_P], --[dnl -- test DEF = "`$SED -n dnl -- -e '\''s/^[[ ]]*//'\'' dnl Strip leading whitespace -- -e '\''/^\(;.*\)*$/d'\'' dnl Delete empty lines and comments -- -e '\''s/^\(EXPORTS\|LIBRARY\)\([[ ]].*\)*$/DEF/p'\'' dnl -- -e q dnl Only consider the first "real" line -- $1`" dnl --])# _LT_DLL_DEF_P -- -- - # LT_LIB_M - # -------- - # check for math library -@@ -3844,11 +3569,11 @@ - # These system don't have libm, or don't need it - ;; - *-ncr-sysv4.3*) -- AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM=-lmw) -+ AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw") - AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") - ;; - *) -- AC_CHECK_LIB(m, cos, LIBM=-lm) -+ AC_CHECK_LIB(m, cos, LIBM="-lm") - ;; - esac - AC_SUBST([LIBM]) -@@ -3867,7 +3592,7 @@ - - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= - --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $cc_basename in - nvcc*) - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;; -@@ -3919,7 +3644,7 @@ - symcode='[[ABCDGISTW]]' - ;; - hpux*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - symcode='[[ABCDEGRST]]' - fi - ;; -@@ -3952,44 +3677,14 @@ - symcode='[[ABCDGIRSTW]]' ;; - esac - --if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Gets list of data symbols to import. -- lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" -- # Adjust the below global symbol transforms to fixup imported variables. -- lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" -- lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" -- lt_c_name_lib_hook="\ -- -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -- -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" --else -- # Disable hooks by default. -- lt_cv_sys_global_symbol_to_import= -- lt_cdecl_hook= -- lt_c_name_hook= -- lt_c_name_lib_hook= --fi -- - # Transform an extracted symbol line into a proper C declaration. - # Some systems (esp. 
on ia64) link data and code symbols differently, - # so use this general approach. --lt_cv_sys_global_symbol_to_cdecl="sed -n"\ --$lt_cdecl_hook\ --" -e 's/^T .* \(.*\)$/extern int \1();/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" -+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ --$lt_c_name_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" -- --# Transform an extracted symbol line into symbol name with lib prefix and --# symbol address. --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ --$lt_c_name_lib_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -4007,24 +3702,21 @@ - - # Write the raw and C identifiers. - if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Fake it for dumpbin and say T for any non-static function, -- # D for any global variable and I for any imported variable. -+ # Fake it for dumpbin and say T for any non-static function -+ # and D for any global variable. - # Also find C++ and __fastcall symbols from MSVC++, - # which start with @ or ?. - lt_cv_sys_global_symbol_pipe="$AWK ['"\ - " {last_section=section; section=\$ 3};"\ - " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ - " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ --" /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ --" /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ --" /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ - " \$ 0!~/External *\|/{next};"\ - " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ - " {if(hide[section]) next};"\ --" {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ --" {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ --" s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ --" s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ -+" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ -+" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ -+" s[1]~/^[@?]/{print s[1], s[1]; next};"\ -+" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ - " ' prfx=^$ac_symprfx]" - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" -@@ -4064,11 +3756,11 @@ - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext - /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ --#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE --/* DATA imports from DLLs on WIN32 can't be const, because runtime -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. */ - # define LT@&t@_DLSYM_CONST --#elif defined __osf__ -+#elif defined(__osf__) - /* This system does not cope well with relocations in const data. */ - # define LT@&t@_DLSYM_CONST - #else -@@ -4094,7 +3786,7 @@ - { - { "@PROGRAM@", (void *) 0 }, - _LT_EOF -- $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext -+ $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext - cat <<\_LT_EOF >> conftest.$ac_ext - {0, (void *) 0} - }; -@@ -4114,9 +3806,9 @@ - mv conftest.$ac_objext conftstm.$ac_objext - lt_globsym_save_LIBS=$LIBS - lt_globsym_save_CFLAGS=$CFLAGS -- LIBS=conftstm.$ac_objext -+ LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" -- if AC_TRY_EVAL(ac_link) && test -s conftest$ac_exeext; then -+ if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then - pipe_works=yes - fi - LIBS=$lt_globsym_save_LIBS -@@ -4137,7 +3829,7 @@ - rm -rf conftest* conftst* - - # Do not use the global_symbol_pipe unless it works. -- if test yes = "$pipe_works"; then -+ if test "$pipe_works" = yes; then - break - else - lt_cv_sys_global_symbol_pipe= -@@ -4164,16 +3856,12 @@ - [Take the output of nm and produce a listing of raw symbols and C names]) - _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], - [Transform the output of nm in a proper C declaration]) --_LT_DECL([global_symbol_to_import], [lt_cv_sys_global_symbol_to_import], [1], -- [Transform the output of nm into a list of symbols to manually relocate]) - _LT_DECL([global_symbol_to_c_name_address], - [lt_cv_sys_global_symbol_to_c_name_address], [1], - [Transform the output of nm in a C name address pair]) - _LT_DECL([global_symbol_to_c_name_address_lib_prefix], - [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], - [Transform the output of nm in a C name address pair when lib prefix is needed]) --_LT_DECL([nm_interface], [lt_cv_nm_interface], [1], -- [The name lister interface]) - _LT_DECL([], [nm_file_list_spec], [1], - [Specify filename containing input files for $NM]) - ]) # _LT_CMD_GLOBAL_SYMBOLS -@@ -4189,18 +3877,17 @@ - - m4_if([$1], [CXX], [ - # C++ specific cases for pic, static, wl, etc. -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - - amigaos*) -@@ -4211,8 +3898,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. 
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -4228,11 +3915,6 @@ - # (--disable-auto-import) libraries - m4_if([$1], [GCJ], [], - [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) -- case $host_os in -- os2*) -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' -- ;; -- esac - ;; - darwin* | rhapsody*) - # PIC is the default on this platform -@@ -4282,7 +3964,7 @@ - case $host_os in - aix[[4-9]]*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - else -@@ -4323,14 +4005,14 @@ - case $cc_basename in - CC*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' -- if test ia64 != "$host_cpu"; then -+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' -+ if test "$host_cpu" != ia64; then - _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' - fi - ;; - aCC*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' -+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' - case $host_cpu in - hppa*64*|ia64*) - # +Z the default -@@ -4359,7 +4041,7 @@ - ;; - esac - ;; -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - KCC*) - # KAI C++ Compiler -@@ -4367,7 +4049,7 @@ - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - ecpc* ) -- # old Intel C++ for x86_64, which still supported -KPIC. -+ # old Intel C++ for x86_64 which still supported -KPIC. - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' -@@ -4512,18 +4194,17 @@ - fi - ], - [ -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - - amigaos*) -@@ -4534,8 +4215,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. 
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -4552,11 +4233,6 @@ - # (--disable-auto-import) libraries - m4_if([$1], [GCJ], [], - [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) -- case $host_os in -- os2*) -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' -- ;; -- esac - ;; - - darwin* | rhapsody*) -@@ -4627,7 +4303,7 @@ - case $host_os in - aix*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - else -@@ -4635,30 +4311,11 @@ - fi - ;; - -- darwin* | rhapsody*) -- # PIC is the default on this platform -- # Common symbols not allowed in MH_DYLIB files -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' -- case $cc_basename in -- nagfor*) -- # NAG Fortran compiler -- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' -- ;; -- esac -- ;; -- - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - m4_if([$1], [GCJ], [], - [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) -- case $host_os in -- os2*) -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' -- ;; -- esac - ;; - - hpux9* | hpux10* | hpux11*) -@@ -4674,7 +4331,7 @@ - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' -+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) -@@ -4683,9 +4340,9 @@ - _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in -- # old Intel for x86_64, which still supported -KPIC. -+ # old Intel for x86_64 which still supported -KPIC. - ecc*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' -@@ -4710,12 +4367,6 @@ - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' -- ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -4813,7 +4464,7 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi -@@ -4842,7 +4493,7 @@ - fi - ]) - case $host_os in -- # For platforms that do not support PIC, -DPIC is meaningless: -+ # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - _LT_TAGVAR(lt_prog_compiler_pic, $1)= - ;; -@@ -4908,21 +4559,17 @@ - case $host_os in - aix[[4-9]]*) - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. -- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". 
-- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global defined -+ # symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - ;; - pw32*) -- _LT_TAGVAR(export_symbols_cmds, $1)=$ltdll_cmds -+ _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" - ;; - cygwin* | mingw* | cegcc*) - case $cc_basename in -@@ -4968,9 +4615,9 @@ - # included in the symbol list - _LT_TAGVAR(include_expsyms, $1)= - # exclude_expsyms can be an extended regexp of symbols to exclude -- # it will be wrapped by ' (' and ')$', so one must not match beginning or -- # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', -- # as well as any symbol that contains 'd'. -+ # it will be wrapped by ` (' and `)$', so one must not match beginning or -+ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', -+ # as well as any symbol that contains `d'. - _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if -@@ -4986,7 +4633,7 @@ - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; -@@ -4994,7 +4641,7 @@ - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; -- openbsd* | bitrig*) -+ openbsd*) - with_gnu_ld=no - ;; - esac -@@ -5004,7 +4651,7 @@ - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. 
- lt_use_gnu_ld_interface=no -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility -@@ -5026,24 +4673,24 @@ - esac - fi - -- if test yes = "$lt_use_gnu_ld_interface"; then -+ if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty -- wlarc='$wl' -+ wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - _LT_TAGVAR(whole_archive_flag_spec, $1)= - fi - supports_anon_versioning=no -- case `$LD -v | $SED -e 's/([^)]\+)\s\+//' 2>&1` in -+ case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... -@@ -5056,7 +4703,7 @@ - case $host_os in - aix[[3-9]]*) - # On AIX/PPC, the GNU linker is very broken -- if test ia64 != "$host_cpu"; then -+ if test "$host_cpu" != ia64; then - _LT_TAGVAR(ld_shlibs, $1)=no - cat <<_LT_EOF 1>&2 - -@@ -5075,7 +4722,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='' - ;; - m68k) -@@ -5091,7 +4738,7 @@ - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -5101,7 +4748,7 @@ - # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, - # as there is no search path for DLLs. 
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_TAGVAR(always_export_symbols, $1)=no - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -@@ -5109,89 +4756,61 @@ - _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... -- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - haiku*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - -- os2*) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(hardcode_minus_L, $1)=yes -- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- shrext_cmds=.dll -- _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | 
$prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -- ;; -- - interix[[3-9]]*) - _LT_TAGVAR(hardcode_direct, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no -- if test linux-dietlibc = "$host_os"; then -+ if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ -- && test no = "$tmp_diet" -+ && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in 
$convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; -@@ -5202,47 +4821,42 @@ - lf95*) # Lahey Fortran 8.1 - _LT_TAGVAR(whole_archive_flag_spec, $1)= - tmp_sharedflag='--shared' ;; -- nagfor*) # NAGFOR 5.3 -- tmp_sharedflag='-Wl,-shared' ;; - xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - _LT_TAGVAR(compiler_needs_object, $1)=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - _LT_TAGVAR(compiler_needs_object, $1)=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac -- _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in -- tcc*) -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='-rdynamic' -- ;; - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - _LT_TAGVAR(archive_expsym_cmds, 
$1)='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -5256,8 +4870,8 @@ - _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -5275,8 +4889,8 @@ - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -5288,7 +4902,7 @@ - _LT_TAGVAR(ld_shlibs, $1)=no - cat <<_LT_EOF 1>&2 - --*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot -+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not - *** reliably create shared libraries on SCO systems. Therefore, libtool - *** is disabling shared libraries support. We urge you to upgrade GNU - *** binutils to release 2.16.91.0.3 or newer. Another option is to modify -@@ -5303,9 +4917,9 @@ - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. 
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -5322,15 +4936,15 @@ - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - -- if test no = "$_LT_TAGVAR(ld_shlibs, $1)"; then -+ if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then - runpath_var= - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= - _LT_TAGVAR(export_dynamic_flag_spec, $1)= -@@ -5346,7 +4960,7 @@ - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. - _LT_TAGVAR(hardcode_minus_L, $1)=yes -- if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then -+ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - _LT_TAGVAR(hardcode_direct, $1)=unsupported -@@ -5354,57 +4968,34 @@ - ;; - - aix[[4-9]]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. -- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". -- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global -+ # defined symbols, whereas GNU nm marks them as "W". 
- if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. - case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) - for ld_flag in $LDFLAGS; do -- if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then -+ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -5423,21 +5014,13 @@ - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(file_list_spec, $1)='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # traditional, no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. 
-- _LT_TAGVAR(hardcode_direct, $1)=no -- _LT_TAGVAR(hardcode_direct_absolute, $1)=no -- ;; -- esac -+ _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - case $host_os in aix4.[[012]]|aix4.[[012]].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -5456,80 +5039,61 @@ - ;; - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag="$shared_flag "'$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - _LT_TAGVAR(always_export_symbols, $1)=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - _LT_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
- _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' - _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" -- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. - _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)=yes -- _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared libraries. 
-- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared libraries. -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -5538,7 +5102,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='' - ;; - m68k) -@@ -5568,17 +5132,16 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -@@ -5587,18 +5150,18 @@ - # Don't use ranlib - _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' - _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # Assume MSVC wrapper -@@ -5607,7 +5170,7 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. 
-@@ -5657,33 +5220,33 @@ - ;; - - hpux9*) -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(hardcode_direct, $1)=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - _LT_TAGVAR(hardcode_minus_L, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - ;; - - hpux10*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- _LT_TAGVAR(hardcode_minus_L, $1)=yes -@@ -5691,25 +5254,25 @@ - ;; - - hpux11*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - m4_if($1, [], [ -@@ -5717,14 +5280,14 @@ - # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) - _LT_LINKER_OPTION([if $CC understands -b], - _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b], -- [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'], -+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'], - [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])], -- [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags']) -+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags']) - ;; - esac - fi -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - case $host_cpu in -@@ -5735,7 +5298,7 @@ - *) - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
-@@ -5746,16 +5309,16 @@ - ;; - - irix5* | irix6* | nonstopux*) -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. - # This should be the same for all languages, so no per-tag cache variable. - AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], - [lt_cv_irix_exported_symbol], -- [save_LDFLAGS=$LDFLAGS -- LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" -+ [save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - AC_LINK_IFELSE( - [AC_LANG_SOURCE( - [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], -@@ -5768,31 +5331,21 @@ - end]])])], - [lt_cv_irix_exported_symbol=yes], - [lt_cv_irix_exported_symbol=no]) -- LDFLAGS=$save_LDFLAGS]) -- if test yes = "$lt_cv_irix_exported_symbol"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' -+ LDFLAGS="$save_LDFLAGS"]) -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)='no' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(inherit_rpath, $1)=yes - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - -- linux*) -- case $cc_basename in -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- _LT_TAGVAR(ld_shlibs, $1)=yes -- 
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- ;; -- esac -- ;; -- - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out -@@ -5807,7 +5360,7 @@ - newsos6) - _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_TAGVAR(hardcode_direct, $1)=yes -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; -@@ -5815,19 +5368,27 @@ - *nto* | *qnx*) - ;; - -- openbsd* | bitrig*) -+ openbsd*) - if test -f /usr/libexec/ld.so; then - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -+ case $host_os in -+ openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*) -+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' -+ ;; -+ *) -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ ;; -+ esac - fi - else - _LT_TAGVAR(ld_shlibs, $1)=no -@@ -5838,53 +5399,33 @@ - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_TAGVAR(hardcode_minus_L, $1)=yes - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- shrext_cmds=.dll -- _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ 
-- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -+ _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' -+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) -- if test yes = "$GCC"; then -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)='no' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag -- if test yes = "$GCC"; then -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - else - 
_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' -+ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' -@@ -5895,24 +5436,24 @@ - - solaris*) - _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' -- if test yes = "$GCC"; then -- wlarc='$wl' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ wlarc='${wl}' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' -- _LT_TAGVAR(archive_cmds, $1)='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' -+ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) -- wlarc='$wl' -- _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ wlarc='${wl}' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi -@@ -5922,11 +5463,11 @@ - 
solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. GCC discards it without '$wl', -+ # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) -- if test yes = "$GCC"; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' - fi -@@ -5936,10 +5477,10 @@ - ;; - - sunos4*) -- if test sequent = "$host_vendor"; then -+ if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -5988,43 +5529,43 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) -- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -- _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' -+ _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - -@@ -6039,17 +5580,17 @@ - ;; - esac - -- if test sni = "$host_vendor"; then -+ if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Blargedynsym' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym' - ;; - esac - fi - fi - ]) - AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) --test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no -+test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no - - _LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld - -@@ -6066,7 +5607,7 @@ - # Assume -lc should be added - _LT_TAGVAR(archive_cmds_need_lc, $1)=yes - -- if test yes,yes = "$GCC,$enable_shared"; then -+ if test "$enable_shared" = yes && test "$GCC" = yes; then - case $_LT_TAGVAR(archive_cmds, $1) in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. 
-@@ -6146,12 +5687,12 @@ - _LT_TAGDECL([], [hardcode_libdir_separator], [1], - [Whether we need a single "-rpath" flag with a separated argument]) - _LT_TAGDECL([], [hardcode_direct], [0], -- [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes -+ [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes - DIR into the resulting binary]) - _LT_TAGDECL([], [hardcode_direct_absolute], [0], -- [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes -+ [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes - DIR into the resulting binary and the resulting library dependency is -- "absolute", i.e impossible to change by setting $shlibpath_var if the -+ "absolute", i.e impossible to change by setting ${shlibpath_var} if the - library is relocated]) - _LT_TAGDECL([], [hardcode_minus_L], [0], - [Set to "yes" if using the -LDIR flag during linking hardcodes DIR -@@ -6192,10 +5733,10 @@ - # ------------------------ - # Ensure that the configuration variables for a C compiler are suitably - # defined. These variables are subsequently used by _LT_CONFIG to write --# the compiler configuration to 'libtool'. -+# the compiler configuration to `libtool'. - m4_defun([_LT_LANG_C_CONFIG], - [m4_require([_LT_DECL_EGREP])dnl --lt_save_CC=$CC -+lt_save_CC="$CC" - AC_LANG_PUSH(C) - - # Source file extension for C test sources. -@@ -6235,18 +5776,18 @@ - LT_SYS_DLOPEN_SELF - _LT_CMD_STRIPLIB - -- # Report what library types will actually be built -+ # Report which library types will actually be built - AC_MSG_CHECKING([if libtool supports shared libraries]) - AC_MSG_RESULT([$can_build_shared]) - - AC_MSG_CHECKING([whether to build shared libraries]) -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' -@@ -6254,12 +5795,8 @@ - ;; - - aix[[4-9]]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -6267,13 +5804,13 @@ - - AC_MSG_CHECKING([whether to build static libraries]) - # Make sure either enable_shared or enable_static is yes. -- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - AC_MSG_RESULT([$enable_static]) - - _LT_CONFIG($1) - fi - AC_LANG_POP --CC=$lt_save_CC -+CC="$lt_save_CC" - ])# _LT_LANG_C_CONFIG - - -@@ -6281,14 +5818,14 @@ - # -------------------------- - # Ensure that the configuration variables for a C++ compiler are suitably - # defined. These variables are subsequently used by _LT_CONFIG to write --# the compiler configuration to 'libtool'. -+# the compiler configuration to `libtool'. 
- m4_defun([_LT_LANG_CXX_CONFIG], - [m4_require([_LT_FILEUTILS_DEFAULTS])dnl - m4_require([_LT_DECL_EGREP])dnl - m4_require([_LT_PATH_MANIFEST_TOOL])dnl --if test -n "$CXX" && ( test no != "$CXX" && -- ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) || -- (test g++ != "$CXX"))); then -+if test -n "$CXX" && ( test "X$CXX" != "Xno" && -+ ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || -+ (test "X$CXX" != "Xg++"))) ; then - AC_PROG_CXXCPP - else - _lt_caught_CXX_error=yes -@@ -6330,7 +5867,7 @@ - # the CXX compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_caught_CXX_error"; then -+if test "$_lt_caught_CXX_error" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="int some_variable = 0;" - -@@ -6372,35 +5909,35 @@ - if test -n "$compiler"; then - # We don't want -fno-exception when compiling C++ code, so set the - # no_builtin_flag separately -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' - else - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= - fi - -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - # Set up default GNU C++ configuration - - LT_PATH_LD - - # Check if GNU C++ uses GNU ld as the underlying linker, since the - # archiving commands below assume that GNU ld is being used. -- if test yes = "$with_gnu_ld"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ if test "$with_gnu_ld" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - - # If archive_cmds runs LD, not CC, wlarc should be empty - # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to - # investigate it a little bit more. (MM) -- wlarc='$wl' -+ wlarc='${wl}' - - # ancient GNU ld didn't support --whole-archive et. al. - if eval "`$CC -print-prog-name=ld` --help 2>&1" | - $GREP 'no-whole-archive' > /dev/null; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - _LT_TAGVAR(whole_archive_flag_spec, $1)= - fi -@@ -6436,30 +5973,18 @@ - _LT_TAGVAR(ld_shlibs, $1)=no - ;; - aix[[4-9]]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. 
- aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. - case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) - for ld_flag in $LDFLAGS; do - case $ld_flag in -@@ -6469,13 +5994,6 @@ - ;; - esac - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -6494,21 +6012,13 @@ - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(file_list_spec, $1)='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. -- _LT_TAGVAR(hardcode_direct, $1)=no -- _LT_TAGVAR(hardcode_direct_absolute, $1)=no -- ;; -- esac -+ _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' - -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - case $host_os in aix4.[[012]]|aix4.[[012]].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -6526,84 +6036,64 @@ - fi - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag=$shared_flag' $wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. 
The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to - # export. - _LT_TAGVAR(always_export_symbols, $1)=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. -- # The "-G" linker flag allows undefined symbols. -- _LT_TAGVAR(no_undefined_flag, $1)='-bernotok' -+ _LT_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an empty - # executable. - _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' - _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" -- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. - _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. 
-- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)=yes -- _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared -- # libraries. Need -bnortl late, we may have -brtl in LDFLAGS. -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared -+ # libraries. -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -6613,7 +6103,7 @@ - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -6641,58 +6131,57 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - # Don't use ranlib - _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' - _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- func_to_tool_file "$lt_outputfile"~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ func_to_tool_file "$lt_outputfile"~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # g++ - # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, - # as there is no search path for DLLs. - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_TAGVAR(always_export_symbols, $1)=no - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... 
-- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -6703,34 +6192,6 @@ - _LT_DARWIN_LINKER_FEATURES($1) - ;; - -- os2*) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(hardcode_minus_L, $1)=yes -- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- shrext_cmds=.dll -- _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -- ;; -- - dgux*) - case $cc_basename in - ec++*) -@@ -6765,15 +6226,18 @@ - _LT_TAGVAR(ld_shlibs, $1)=yes - ;; - -+ gnu*) -+ ;; -+ - haiku*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - - hpux9*) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - 
_LT_TAGVAR(hardcode_libdir_separator, $1)=: -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, - # but as the default -@@ -6785,7 +6249,7 @@ - _LT_TAGVAR(ld_shlibs, $1)=no - ;; - aCC*) -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. -@@ -6794,11 +6258,11 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - ;; - *) -- if test yes = "$GXX"; then -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GXX" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - # FIXME: insert proper C++ library support - _LT_TAGVAR(ld_shlibs, $1)=no -@@ -6808,15 +6272,15 @@ - ;; - - hpux10*|hpux11*) -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ if test $with_gnu_ld = no; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - case $host_cpu in - hppa*64*|ia64*) - ;; - *) -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - ;; - esac - fi -@@ -6842,13 +6306,13 @@ - aCC*) - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - # Commands to make compiler produce verbose output that lists -@@ -6859,20 +6323,20 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - ;; - *) -- if test yes = "$GXX"; then -- if test no = "$with_gnu_ld"; then -+ if test "$GXX" = yes; then -+ if test $with_gnu_ld = no; then - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - fi -@@ -6887,22 +6351,22 @@ - interix[[3-9]]*) - _LT_TAGVAR(hardcode_direct, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - irix5* | irix6*) - case $cc_basename in - CC*) - # SGI C++ -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - - # Archives containing C++ object files must be created using - # "CC -ar", where "CC" is the IRIX C++ compiler. This is -@@ -6911,22 +6375,22 @@ - _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' - ;; - *) -- if test yes = "$GXX"; then -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GXX" = yes; then -+ if test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' - fi - fi - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - esac -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(inherit_rpath, $1)=yes - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - KCC*) - # Kuck and Associates, Inc. 
(KAI) C++ Compiler -@@ -6934,8 +6398,8 @@ - # KCC will only create a shared library if the output file - # ends with ".so" (or ".sl" for HP-UX), so rename the library - # to its proper name (with version) after linking. -- _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib $wl-retain-symbols-file,$export_symbols; mv \$templib $lib' -+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. -@@ -6944,10 +6408,10 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - - # Archives containing C++ object files must be created using - # "CC -Bstatic", where "CC" is the KAI C++ compiler. -@@ -6961,59 +6425,59 @@ - # earlier do not add the objects themselves. 
- case `$CC -V 2>&1` in - *"Version 7."*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - ;; - *) # Version 8.0 or newer - tmp_idyn= - case $host_cpu in - ia64*) tmp_idyn=' -i_dynamic';; - esac -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - ;; - esac - _LT_TAGVAR(archive_cmds_need_lc, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - ;; - pgCC* | pgcpp*) - # Portland Group C++ compiler - case `$CC -V` in - *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*) - _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ -- compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ -+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' - _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ -- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ -- $RANLIB $oldlib' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ -+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ -+ $RANLIB $oldlib' - _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects 
$compiler_flags ${wl}-soname ${wl}$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' - ;; - *) # Version 6 and above use weak symbols -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' - ;; - esac - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl--rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - ;; - cxx*) - # Compaq C++ -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib $wl-retain-symbols-file $wl$export_symbols' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' - - runpath_var=LD_RUN_PATH - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' -@@ -7027,18 +6491,18 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. 
-- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' -+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' - ;; - xl* | mpixl* | bgxl*) - # IBM XL 8.0 on PPC, with GNU ld -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -- _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ if test "x$supports_anon_versioning" = xyes; then - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - ;; - *) -@@ -7046,10 +6510,10 @@ - *Sun\ C*) - # Sun C++ 5.9 - _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' -- _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file $wl$export_symbols' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - _LT_TAGVAR(compiler_needs_object, $1)=yes - - # Not sure whether something based on -@@ -7107,17 +6571,22 @@ - _LT_TAGVAR(ld_shlibs, $1)=yes - ;; - -- openbsd* | bitrig*) -+ openbsd2*) -+ # C++ shared libraries are fairly broken -+ 
_LT_TAGVAR(ld_shlibs, $1)=no -+ ;; -+ -+ openbsd*) - if test -f /usr/libexec/ld.so; then - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file,$export_symbols -o $lib' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -- _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - fi - output_verbose_link_cmd=func_echo_all - else -@@ -7133,9 +6602,9 @@ - # KCC will only create a shared library if the output file - # ends with ".so" (or ".sl" for HP-UX), so rename the library - # to its proper name (with version) after linking. -- _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' -+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - # Archives containing C++ object files must be created using -@@ -7153,17 +6622,17 @@ - cxx*) - case $host in - osf3*) -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - ;; - *) - _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test 
-n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ -- echo "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input $wl$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~ -- $RM $lib.exp' -+ echo "-hidden">> $lib.exp~ -+ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~ -+ $RM $lib.exp' - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' - ;; - esac -@@ -7178,21 +6647,21 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - ;; - *) -- if test yes,no = "$GXX,$with_gnu_ld"; then -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - case $host in - osf3*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version 
${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - esac - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - # Commands to make compiler produce verbose output that lists -@@ -7238,9 +6707,9 @@ - # Sun C++ 4.2, 5.x and Centerline C++ - _LT_TAGVAR(archive_cmds_need_lc,$1)=yes - _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' -- _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -@@ -7248,7 +6717,7 @@ - solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. -+ # but understands `-z linker_flag'. - # Supported since Solaris 2.6 (maybe 2.5.1?) - _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' - ;; -@@ -7265,30 +6734,30 @@ - ;; - gcx*) - # Green Hills C++ Compiler -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - - # The C++ compiler must be used to create the archive. - _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' - ;; - *) - # GNU C++ compiler with Solaris linker -- if test yes,no = "$GXX,$with_gnu_ld"; then -- _LT_TAGVAR(no_undefined_flag, $1)=' $wl-z ${wl}defs' -+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' - if $CC --version | $GREP -v '^2\.7' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. 
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' - else -- # g++ 2.7 appears to require '-G' NOT '-shared' on this -+ # g++ 2.7 appears to require `-G' NOT `-shared' on this - # platform. -- _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when -@@ -7296,11 +6765,11 @@ - output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' - fi - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir' - case $host_os in - solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - ;; - esac - fi -@@ -7309,52 +6778,52 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) -- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - runpath_var='LD_RUN_PATH' - - case $cc_basename in - CC*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -- _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' -+ _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - case $cc_basename in - CC*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~ -- '"$_LT_TAGVAR(old_archive_cmds, $1)" -+ '"$_LT_TAGVAR(old_archive_cmds, $1)" - _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~ -- '"$_LT_TAGVAR(reload_cmds, $1)" -+ '"$_LT_TAGVAR(reload_cmds, $1)" - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - ;; -@@ -7385,10 +6854,10 @@ - esac - - AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) -- test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no -+ test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no - -- _LT_TAGVAR(GCC, $1)=$GXX -- _LT_TAGVAR(LD, $1)=$LD -+ _LT_TAGVAR(GCC, $1)="$GXX" -+ _LT_TAGVAR(LD, $1)="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -7415,7 +6884,7 @@ - lt_cv_path_LD=$lt_save_path_LD - lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld - lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld --fi # test yes != "$_lt_caught_CXX_error" -+fi # test "$_lt_caught_CXX_error" != yes - - AC_LANG_POP - ])# _LT_LANG_CXX_CONFIG -@@ -7437,14 +6906,13 @@ - AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH]) - func_stripname_cnf () - { -- case @S|@2 in -- .*) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%\\\\@S|@2\$%%"`;; -- *) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%@S|@2\$%%"`;; -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; - esac - } # func_stripname_cnf - ])# _LT_FUNC_STRIPNAME_CNF - -- - # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) - # --------------------------------- - # Figure out "hidden" library dependencies from verbose -@@ -7528,13 +6996,13 @@ - pre_test_object_deps_done=no - - for p in `eval "$output_verbose_link_cmd"`; do -- case $prev$p in -+ case ${prev}${p} in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. - # Remove the space. 
-- if test x-L = "$p" || -- test x-R = "$p"; then -+ if test $p = "-L" || -+ test $p = "-R"; then - prev=$p - continue - fi -@@ -7550,16 +7018,16 @@ - case $p in - =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; - esac -- if test no = "$pre_test_object_deps_done"; then -- case $prev in -+ if test "$pre_test_object_deps_done" = no; then -+ case ${prev} in - -L | -R) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. - if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then -- _LT_TAGVAR(compiler_lib_search_path, $1)=$prev$p -+ _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}" - else -- _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} $prev$p" -+ _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}" - fi - ;; - # The "-l" case would never come before the object being -@@ -7567,9 +7035,9 @@ - esac - else - if test -z "$_LT_TAGVAR(postdeps, $1)"; then -- _LT_TAGVAR(postdeps, $1)=$prev$p -+ _LT_TAGVAR(postdeps, $1)="${prev}${p}" - else -- _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} $prev$p" -+ _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" - fi - fi - prev= -@@ -7584,15 +7052,15 @@ - continue - fi - -- if test no = "$pre_test_object_deps_done"; then -+ if test "$pre_test_object_deps_done" = no; then - if test -z "$_LT_TAGVAR(predep_objects, $1)"; then -- _LT_TAGVAR(predep_objects, $1)=$p -+ _LT_TAGVAR(predep_objects, $1)="$p" - else - _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p" - fi - else - if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then -- _LT_TAGVAR(postdep_objects, $1)=$p -+ _LT_TAGVAR(postdep_objects, $1)="$p" - else - _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p" - fi -@@ -7623,6 +7091,51 @@ - _LT_TAGVAR(postdep_objects,$1)= - _LT_TAGVAR(postdeps,$1)= - ;; -+ -+linux*) -+ case `$CC -V 2>&1 | sed 5q` in -+ *Sun\ C*) -+ # Sun C++ 5.9 -+ -+ # The more standards-conforming stlport4 library is -+ # incompatible with the Cstd library. Avoid specifying -+ # it if it's in CXXFLAGS. Ignore libCrun as -+ # -library=stlport4 depends on it. -+ case " $CXX $CXXFLAGS " in -+ *" -library=stlport4 "*) -+ solaris_use_stlport4=yes -+ ;; -+ esac -+ -+ if test "$solaris_use_stlport4" != yes; then -+ _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' -+ fi -+ ;; -+ esac -+ ;; -+ -+solaris*) -+ case $cc_basename in -+ CC* | sunCC*) -+ # The more standards-conforming stlport4 library is -+ # incompatible with the Cstd library. Avoid specifying -+ # it if it's in CXXFLAGS. Ignore libCrun as -+ # -library=stlport4 depends on it. -+ case " $CXX $CXXFLAGS " in -+ *" -library=stlport4 "*) -+ solaris_use_stlport4=yes -+ ;; -+ esac -+ -+ # Adding this requires a known-good setup of shared libraries for -+ # Sun compiler versions before 5.6, else PIC objects from an old -+ # archive will be linked into the output, leading to subtle bugs. -+ if test "$solaris_use_stlport4" != yes; then -+ _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' -+ fi -+ ;; -+ esac -+ ;; - esac - ]) - -@@ -7631,7 +7144,7 @@ - esac - _LT_TAGVAR(compiler_lib_search_dirs, $1)= - if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then -- _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | $SED -e 's! -L! 
!g' -e 's!^ !!'` -+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` - fi - _LT_TAGDECL([], [compiler_lib_search_dirs], [1], - [The directories searched by this compiler when creating a shared library]) -@@ -7651,10 +7164,10 @@ - # -------------------------- - # Ensure that the configuration variables for a Fortran 77 compiler are - # suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. - m4_defun([_LT_LANG_F77_CONFIG], - [AC_LANG_PUSH(Fortran 77) --if test -z "$F77" || test no = "$F77"; then -+if test -z "$F77" || test "X$F77" = "Xno"; then - _lt_disable_F77=yes - fi - -@@ -7691,7 +7204,7 @@ - # the F77 compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_disable_F77"; then -+if test "$_lt_disable_F77" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t -@@ -7713,7 +7226,7 @@ - _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. -- lt_save_CC=$CC -+ lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${F77-"f77"} -@@ -7727,25 +7240,21 @@ - AC_MSG_RESULT([$can_build_shared]) - - AC_MSG_CHECKING([whether to build shared libraries]) -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[[4-9]]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -7753,11 +7262,11 @@ - - AC_MSG_CHECKING([whether to build static libraries]) - # Make sure either enable_shared or enable_static is yes. -- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - AC_MSG_RESULT([$enable_static]) - -- _LT_TAGVAR(GCC, $1)=$G77 -- _LT_TAGVAR(LD, $1)=$LD -+ _LT_TAGVAR(GCC, $1)="$G77" -+ _LT_TAGVAR(LD, $1)="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -7774,9 +7283,9 @@ - fi # test -n "$compiler" - - GCC=$lt_save_GCC -- CC=$lt_save_CC -- CFLAGS=$lt_save_CFLAGS --fi # test yes != "$_lt_disable_F77" -+ CC="$lt_save_CC" -+ CFLAGS="$lt_save_CFLAGS" -+fi # test "$_lt_disable_F77" != yes - - AC_LANG_POP - ])# _LT_LANG_F77_CONFIG -@@ -7786,11 +7295,11 @@ - # ------------------------- - # Ensure that the configuration variables for a Fortran compiler are - # suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. 
- m4_defun([_LT_LANG_FC_CONFIG], - [AC_LANG_PUSH(Fortran) - --if test -z "$FC" || test no = "$FC"; then -+if test -z "$FC" || test "X$FC" = "Xno"; then - _lt_disable_FC=yes - fi - -@@ -7827,7 +7336,7 @@ - # the FC compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_disable_FC"; then -+if test "$_lt_disable_FC" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t -@@ -7849,7 +7358,7 @@ - _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. -- lt_save_CC=$CC -+ lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${FC-"f95"} -@@ -7865,25 +7374,21 @@ - AC_MSG_RESULT([$can_build_shared]) - - AC_MSG_CHECKING([whether to build shared libraries]) -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[[4-9]]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -7891,11 +7396,11 @@ - - AC_MSG_CHECKING([whether to build static libraries]) - # Make sure either enable_shared or enable_static is yes. -- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - AC_MSG_RESULT([$enable_static]) - -- _LT_TAGVAR(GCC, $1)=$ac_cv_fc_compiler_gnu -- _LT_TAGVAR(LD, $1)=$LD -+ _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu" -+ _LT_TAGVAR(LD, $1)="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -7915,7 +7420,7 @@ - GCC=$lt_save_GCC - CC=$lt_save_CC - CFLAGS=$lt_save_CFLAGS --fi # test yes != "$_lt_disable_FC" -+fi # test "$_lt_disable_FC" != yes - - AC_LANG_POP - ])# _LT_LANG_FC_CONFIG -@@ -7925,7 +7430,7 @@ - # -------------------------- - # Ensure that the configuration variables for the GNU Java Compiler compiler - # are suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. - m4_defun([_LT_LANG_GCJ_CONFIG], - [AC_REQUIRE([LT_PROG_GCJ])dnl - AC_LANG_SAVE -@@ -7959,7 +7464,7 @@ - CFLAGS=$GCJFLAGS - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC --_LT_TAGVAR(LD, $1)=$LD -+_LT_TAGVAR(LD, $1)="$LD" - _LT_CC_BASENAME([$compiler]) - - # GCJ did not exist at the time GCC didn't implicitly link libc in. -@@ -7996,7 +7501,7 @@ - # -------------------------- - # Ensure that the configuration variables for the GNU Go compiler - # are suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. 
- m4_defun([_LT_LANG_GO_CONFIG], - [AC_REQUIRE([LT_PROG_GO])dnl - AC_LANG_SAVE -@@ -8030,7 +7535,7 @@ - CFLAGS=$GOFLAGS - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC --_LT_TAGVAR(LD, $1)=$LD -+_LT_TAGVAR(LD, $1)="$LD" - _LT_CC_BASENAME([$compiler]) - - # Go did not exist at the time GCC didn't implicitly link libc in. -@@ -8067,7 +7572,7 @@ - # ------------------------- - # Ensure that the configuration variables for the Windows resource compiler - # are suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. - m4_defun([_LT_LANG_RC_CONFIG], - [AC_REQUIRE([LT_PROG_RC])dnl - AC_LANG_SAVE -@@ -8083,7 +7588,7 @@ - lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }' - - # Code to be used in simple link tests --lt_simple_link_test_code=$lt_simple_compile_test_code -+lt_simple_link_test_code="$lt_simple_compile_test_code" - - # ltmain only uses $CC for tagged configurations so make sure $CC is set. - _LT_TAG_COMPILER -@@ -8093,7 +7598,7 @@ - _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. --lt_save_CC=$CC -+lt_save_CC="$CC" - lt_save_CFLAGS=$CFLAGS - lt_save_GCC=$GCC - GCC= -@@ -8122,7 +7627,7 @@ - [m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ], - [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ], - [AC_CHECK_TOOL(GCJ, gcj,) -- test set = "${GCJFLAGS+set}" || GCJFLAGS="-g -O2" -+ test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2" - AC_SUBST(GCJFLAGS)])])[]dnl - ]) - -@@ -8233,7 +7738,7 @@ - # Add /usr/xpg4/bin/sed as it is typically found on Solaris - # along with /bin/sed that truncates output. - for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do -- test ! -f "$lt_ac_sed" && continue -+ test ! -f $lt_ac_sed && continue - cat /dev/null > conftest.in - lt_ac_count=0 - echo $ECHO_N "0123456789$ECHO_C" >conftest.in -@@ -8250,9 +7755,9 @@ - $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break - cmp -s conftest.out conftest.nl || break - # 10000 chars as input seems more than enough -- test 10 -lt "$lt_ac_count" && break -+ test $lt_ac_count -gt 10 && break - lt_ac_count=`expr $lt_ac_count + 1` -- if test "$lt_ac_count" -gt "$lt_ac_max"; then -+ if test $lt_ac_count -gt $lt_ac_max; then - lt_ac_max=$lt_ac_count - lt_cv_path_SED=$lt_ac_sed - fi -@@ -8276,7 +7781,27 @@ - # Find out whether the shell is Bourne or XSI compatible, - # or has some other useful features. 
- m4_defun([_LT_CHECK_SHELL_FEATURES], --[if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then -+[AC_MSG_CHECKING([whether the shell understands some XSI constructs]) -+# Try some XSI features -+xsi_shell=no -+( _lt_dummy="a/b/c" -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ -+ && eval 'test $(( 1 + 1 )) -eq 2 \ -+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ -+ && xsi_shell=yes -+AC_MSG_RESULT([$xsi_shell]) -+_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell']) -+ -+AC_MSG_CHECKING([whether the shell understands "+="]) -+lt_shell_append=no -+( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \ -+ >/dev/null 2>&1 \ -+ && lt_shell_append=yes -+AC_MSG_RESULT([$lt_shell_append]) -+_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append']) -+ -+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - lt_unset=unset - else - lt_unset=false -@@ -8300,9 +7825,102 @@ - ])# _LT_CHECK_SHELL_FEATURES - - -+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY) -+# ------------------------------------------------------ -+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and -+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY. -+m4_defun([_LT_PROG_FUNCTION_REPLACE], -+[dnl { -+sed -e '/^$1 ()$/,/^} # $1 /c\ -+$1 ()\ -+{\ -+m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1]) -+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+]) -+ -+ -+# _LT_PROG_REPLACE_SHELLFNS -+# ------------------------- -+# Replace existing portable implementations of several shell functions with -+# equivalent extended shell implementations where those features are available.. -+m4_defun([_LT_PROG_REPLACE_SHELLFNS], -+[if test x"$xsi_shell" = xyes; then -+ _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl -+ case ${1} in -+ */*) func_dirname_result="${1%/*}${2}" ;; -+ * ) func_dirname_result="${3}" ;; -+ esac]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl -+ func_basename_result="${1##*/}"]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl -+ case ${1} in -+ */*) func_dirname_result="${1%/*}${2}" ;; -+ * ) func_dirname_result="${3}" ;; -+ esac -+ func_basename_result="${1##*/}"]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl -+ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -+ # positional parameters, so assign one to ordinary parameter first. 
-+ func_stripname_result=${3} -+ func_stripname_result=${func_stripname_result#"${1}"} -+ func_stripname_result=${func_stripname_result%"${2}"}]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl -+ func_split_long_opt_name=${1%%=*} -+ func_split_long_opt_arg=${1#*=}]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl -+ func_split_short_opt_arg=${1#??} -+ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl -+ case ${1} in -+ *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -+ *) func_lo2o_result=${1} ;; -+ esac]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}]) -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl -+ func_quote_for_eval "${2}" -+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \ -+ eval "${1}+=\\\\ \\$func_quote_for_eval_result"]) -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ AC_MSG_WARN([Unable to substitute extended shell functions in $ofile]) -+fi -+]) -+ - # _LT_PATH_CONVERSION_FUNCTIONS - # ----------------------------- --# Determine what file name conversion functions should be used by -+# Determine which file name conversion functions should be used by - # func_to_host_file (and, implicitly, by func_to_host_path). These are needed - # for certain cross-compile configurations and native mingw. - m4_defun([_LT_PATH_CONVERSION_FUNCTIONS], -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltmain.sh psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltmain.sh ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltmain.sh 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltmain.sh 2020-07-16 10:48:35.377648742 +0200 -@@ -1,12 +1,9 @@ --#! /bin/sh --## DO NOT EDIT - This file generated from ./build-aux/ltmain.in --## by inline-source v2014-01-03.01 - --# libtool (GNU libtool) 2.4.6 --# Provide generalized library-building support services. -+# libtool (GNU libtool) 2.4.2 - # Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996 - --# Copyright (C) 1996-2015 Free Software Foundation, Inc. -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, -+# 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc. - # This is free software; see the source for copying conditions. There is NO - # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - -@@ -26,2112 +23,881 @@ - # General Public License for more details. - # - # You should have received a copy of the GNU General Public License --# along with this program. 
If not, see <http://www.gnu.org/licenses/>. -- -- --PROGRAM=libtool --PACKAGE=libtool --VERSION=2.4.6 --package_revision=2.4.6 -- -- --## ------ ## --## Usage. ## --## ------ ## -- --# Run './libtool --help' for help with using this script from the --# command line. -- -- --## ------------------------------- ## --## User overridable command paths. ## --## ------------------------------- ## -- --# After configure completes, it has a better idea of some of the --# shell tools we need than the defaults used by the functions shared --# with bootstrap, so set those here where they can still be over- --# ridden by the user, but otherwise take precedence. -- --: ${AUTOCONF="autoconf"} --: ${AUTOMAKE="automake"} -- -- --## -------------------------- ## --## Source external libraries. ## --## -------------------------- ## -- --# Much of our low-level functionality needs to be sourced from external --# libraries, which are installed to $pkgauxdir. -- --# Set a version string for this script. --scriptversion=2015-01-20.17; # UTC -- --# General shell script boiler plate, and helper functions. --# Written by Gary V. Vaughan, 2004 -+# along with GNU Libtool; see the file COPYING. If not, a copy -+# can be downloaded from http://www.gnu.org/licenses/gpl.html, -+# or obtained by writing to the Free Software Foundation, Inc., -+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - --# Copyright (C) 2004-2015 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# This program is free software; you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation; either version 3 of the License, or --# (at your option) any later version. -- --# As a special exception to the GNU General Public License, if you distribute --# this file as part of a program or library that is built using GNU Libtool, --# you may include this file under the same distribution terms that you use --# for the rest of that program. -- --# This program is distributed in the hope that it will be useful, --# but WITHOUT ANY WARRANTY; without even the implied warranty of --# MERCHANTABILITY or FITNES FOR A PARTICULAR PURPOSE. See the GNU --# General Public License for more details. -- --# You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -- --# Please report bugs or propose patches to gary@gnu.org. -- -- --## ------ ## --## Usage. ## --## ------ ## -- --# Evaluate this file near the top of your script to gain access to --# the functions and variables defined here: -+# Usage: $progname [OPTION]... [MODE-ARG]... - # --# . `echo "$0" | ${SED-sed} 's|[^/]*$||'`/build-aux/funclib.sh -+# Provide generalized library-building support services. - # --# If you need to override any of the default environment variable --# settings, do that before evaluating this file. -- -- --## -------------------- ## --## Shell normalisation. 
## --## -------------------- ## -+# --config show all configuration variables -+# --debug enable verbose shell tracing -+# -n, --dry-run display commands without modifying any files -+# --features display basic configuration information and exit -+# --mode=MODE use operation mode MODE -+# --preserve-dup-deps don't remove duplicate dependency libraries -+# --quiet, --silent don't print informational messages -+# --no-quiet, --no-silent -+# print informational messages (default) -+# --no-warn don't display warning messages -+# --tag=TAG use configuration variables from tag TAG -+# -v, --verbose print more informational messages than default -+# --no-verbose don't print the extra informational messages -+# --version print version information -+# -h, --help, --help-all print short, long, or detailed help message -+# -+# MODE must be one of the following: -+# -+# clean remove files from the build directory -+# compile compile a source file into a libtool object -+# execute automatically set library path, then run a program -+# finish complete the installation of libtool libraries -+# install install libraries or executables -+# link create a library or an executable -+# uninstall remove libraries from an installed directory -+# -+# MODE-ARGS vary depending on the MODE. When passed as first option, -+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that. -+# Try `$progname --help --mode=MODE' for a more detailed description of MODE. -+# -+# When reporting a bug, please describe a test case to reproduce it and -+# include the following information: -+# -+# host-triplet: $host -+# shell: $SHELL -+# compiler: $LTCC -+# compiler flags: $LTCFLAGS -+# linker: $LD (gnu? $with_gnu_ld) -+# $progname: (GNU libtool) 2.4.2 -+# automake: $automake_version -+# autoconf: $autoconf_version -+# -+# Report bugs to <bug-libtool@gnu.org>. -+# GNU libtool home page: <http://www.gnu.org/software/libtool/>. -+# General help using GNU software: <http://www.gnu.org/gethelp/>. - --# Some shells need a little help to be as Bourne compatible as possible. --# Before doing anything else, make sure all that help has been provided! -+PROGRAM=libtool -+PACKAGE=libtool -+VERSION=2.4.2 -+TIMESTAMP="" -+package_revision=1.3337 - --DUALCASE=1; export DUALCASE # for MKS sh --if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : -+# Be Bourne compatible -+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: -- # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which -+ # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST - else -- case `(set -o) 2>/dev/null` in *posix*) set -o posix ;; esac --fi -- --# NLS nuisances: We save the old values in case they are required later. --_G_user_locale= --_G_safe_locale= --for _G_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES --do -- eval "if test set = \"\${$_G_var+set}\"; then -- save_$_G_var=\$$_G_var -- $_G_var=C -- export $_G_var -- _G_user_locale=\"$_G_var=\\\$save_\$_G_var; \$_G_user_locale\" -- _G_safe_locale=\"$_G_var=C; \$_G_safe_locale\" -- fi" --done -- --# CDPATH. --(unset CDPATH) >/dev/null 2>&1 && unset CDPATH -- --# Make sure IFS has a sensible default --sp=' ' --nl=' --' --IFS="$sp $nl" -- --# There are apparently some retarded systems that use ';' as a PATH separator! 
--if test "${PATH_SEPARATOR+set}" != set; then -- PATH_SEPARATOR=: -- (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { -- (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || -- PATH_SEPARATOR=';' -- } -+ case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac - fi -+BIN_SH=xpg4; export BIN_SH # for Tru64 -+DUALCASE=1; export DUALCASE # for MKS sh - -- -- --## ------------------------- ## --## Locate command utilities. ## --## ------------------------- ## -- -- --# func_executable_p FILE --# ---------------------- --# Check that FILE is an executable regular file. --func_executable_p () -+# A function that is used when there is no print builtin or printf. -+func_fallback_echo () - { -- test -f "$1" && test -x "$1" --} -- -- --# func_path_progs PROGS_LIST CHECK_FUNC [PATH] --# -------------------------------------------- --# Search for either a program that responds to --version with output --# containing "GNU", or else returned by CHECK_FUNC otherwise, by --# trying all the directories in PATH with each of the elements of --# PROGS_LIST. --# --# CHECK_FUNC should accept the path to a candidate program, and --# set $func_check_prog_result if it truncates its output less than --# $_G_path_prog_max characters. --func_path_progs () --{ -- _G_progs_list=$1 -- _G_check_func=$2 -- _G_PATH=${3-"$PATH"} -- -- _G_path_prog_max=0 -- _G_path_prog_found=false -- _G_save_IFS=$IFS; IFS=${PATH_SEPARATOR-:} -- for _G_dir in $_G_PATH; do -- IFS=$_G_save_IFS -- test -z "$_G_dir" && _G_dir=. -- for _G_prog_name in $_G_progs_list; do -- for _exeext in '' .EXE; do -- _G_path_prog=$_G_dir/$_G_prog_name$_exeext -- func_executable_p "$_G_path_prog" || continue -- case `"$_G_path_prog" --version 2>&1` in -- *GNU*) func_path_progs_result=$_G_path_prog _G_path_prog_found=: ;; -- *) $_G_check_func $_G_path_prog -- func_path_progs_result=$func_check_prog_result -- ;; -- esac -- $_G_path_prog_found && break 3 -- done -- done -- done -- IFS=$_G_save_IFS -- test -z "$func_path_progs_result" && { -- echo "no acceptable sed could be found in \$PATH" >&2 -- exit 1 -- } --} -- -- --# We want to be able to use the functions in this file before configure --# has figured out where the best binaries are kept, which means we have --# to search for them ourselves - except when the results are already set --# where we skip the searches. -- --# Unless the user overrides by setting SED, search the path for either GNU --# sed, or the sed that truncates its output the least. 
--test -z "$SED" && { -- _G_sed_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ -- for _G_i in 1 2 3 4 5 6 7; do -- _G_sed_script=$_G_sed_script$nl$_G_sed_script -- done -- echo "$_G_sed_script" 2>/dev/null | sed 99q >conftest.sed -- _G_sed_script= -- -- func_check_prog_sed () -- { -- _G_path_prog=$1 -- -- _G_count=0 -- printf 0123456789 >conftest.in -- while : -- do -- cat conftest.in conftest.in >conftest.tmp -- mv conftest.tmp conftest.in -- cp conftest.in conftest.nl -- echo '' >> conftest.nl -- "$_G_path_prog" -f conftest.sed <conftest.nl >conftest.out 2>/dev/null || break -- diff conftest.out conftest.nl >/dev/null 2>&1 || break -- _G_count=`expr $_G_count + 1` -- if test "$_G_count" -gt "$_G_path_prog_max"; then -- # Best one so far, save it but keep looking for a better one -- func_check_prog_result=$_G_path_prog -- _G_path_prog_max=$_G_count -- fi -- # 10*(2^10) chars as input seems more than enough -- test 10 -lt "$_G_count" && break -- done -- rm -f conftest.in conftest.tmp conftest.nl conftest.out -- } -- -- func_path_progs "sed gsed" func_check_prog_sed $PATH:/usr/xpg4/bin -- rm -f conftest.sed -- SED=$func_path_progs_result -+ eval 'cat <<_LTECHO_EOF -+$1 -+_LTECHO_EOF' - } - -+# NLS nuisances: We save the old values to restore during execute mode. -+lt_user_locale= -+lt_safe_locale= -+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES -+do -+ eval "if test \"\${$lt_var+set}\" = set; then -+ save_$lt_var=\$$lt_var -+ $lt_var=C -+ export $lt_var -+ lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\" -+ lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" -+ fi" -+done -+LC_ALL=C -+LANGUAGE=C -+export LANGUAGE LC_ALL - --# Unless the user overrides by setting GREP, search the path for either GNU --# grep, or the grep that truncates its output the least. --test -z "$GREP" && { -- func_check_prog_grep () -- { -- _G_path_prog=$1 -- -- _G_count=0 -- _G_path_prog_max=0 -- printf 0123456789 >conftest.in -- while : -- do -- cat conftest.in conftest.in >conftest.tmp -- mv conftest.tmp conftest.in -- cp conftest.in conftest.nl -- echo 'GREP' >> conftest.nl -- "$_G_path_prog" -e 'GREP$' -e '-(cannot match)-' <conftest.nl >conftest.out 2>/dev/null || break -- diff conftest.out conftest.nl >/dev/null 2>&1 || break -- _G_count=`expr $_G_count + 1` -- if test "$_G_count" -gt "$_G_path_prog_max"; then -- # Best one so far, save it but keep looking for a better one -- func_check_prog_result=$_G_path_prog -- _G_path_prog_max=$_G_count -- fi -- # 10*(2^10) chars as input seems more than enough -- test 10 -lt "$_G_count" && break -- done -- rm -f conftest.in conftest.tmp conftest.nl conftest.out -- } -+$lt_unset CDPATH - -- func_path_progs "grep ggrep" func_check_prog_grep $PATH:/usr/xpg4/bin -- GREP=$func_path_progs_result --} - -+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh -+# is ksh but when the shell is invoked as "sh" and the current value of -+# the _XPG environment variable is not equal to 1 (one), the special -+# positional parameter $0, within a function call, is the name of the -+# function. -+progpath="$0" - --## ------------------------------- ## --## User overridable command paths. ## --## ------------------------------- ## - --# All uppercase variable names are used for environment variables. These --# variables can be overridden by the user before calling a script that --# uses them if a suitable command of that name is not already available --# in the command search PATH. 
- - : ${CP="cp -f"} --: ${ECHO="printf %s\n"} --: ${EGREP="$GREP -E"} --: ${FGREP="$GREP -F"} --: ${LN_S="ln -s"} -+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'} - : ${MAKE="make"} - : ${MKDIR="mkdir"} - : ${MV="mv -f"} - : ${RM="rm -f"} - : ${SHELL="${CONFIG_SHELL-/bin/sh}"} -+: ${Xsed="$SED -e 1s/^X//"} - -+# Global variables: -+EXIT_SUCCESS=0 -+EXIT_FAILURE=1 -+EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. -+EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake. - --## -------------------- ## --## Useful sed snippets. ## --## -------------------- ## -+exit_status=$EXIT_SUCCESS - --sed_dirname='s|/[^/]*$||' --sed_basename='s|^.*/||' -+# Make sure IFS has a sensible default -+lt_nl=' -+' -+IFS=" $lt_nl" - --# Sed substitution that helps us do robust quoting. It backslashifies --# metacharacters that are still active within double-quoted strings. --sed_quote_subst='s|\([`"$\\]\)|\\\1|g' -+dirname="s,/[^/]*$,," -+basename="s,^.*/,," - --# Same as above, but do not quote variable references. --sed_double_quote_subst='s/\(["`\\]\)/\\\1/g' -+# func_dirname file append nondir_replacement -+# Compute the dirname of FILE. If nonempty, add APPEND to the result, -+# otherwise set result to NONDIR_REPLACEMENT. -+func_dirname () -+{ -+ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -+ if test "X$func_dirname_result" = "X${1}"; then -+ func_dirname_result="${3}" -+ else -+ func_dirname_result="$func_dirname_result${2}" -+ fi -+} # func_dirname may be replaced by extended shell implementation - --# Sed substitution that turns a string into a regex matching for the --# string literally. --sed_make_literal_regex='s|[].[^$\\*\/]|\\&|g' - --# Sed substitution that converts a w32 file name or path --# that contains forward slashes, into one that contains --# (escaped) backslashes. A very naive implementation. --sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' -+# func_basename file -+func_basename () -+{ -+ func_basename_result=`$ECHO "${1}" | $SED "$basename"` -+} # func_basename may be replaced by extended shell implementation - --# Re-'\' parameter expansions in output of sed_double_quote_subst that --# were '\'-ed in input to the same. If an odd number of '\' preceded a --# '$' in input to sed_double_quote_subst, that '$' was protected from --# expansion. Since each input '\' is now two '\'s, look for any number --# of runs of four '\'s followed by two '\'s and then a '$'. '\' that '$'. --_G_bs='\\' --_G_bs2='\\\\' --_G_bs4='\\\\\\\\' --_G_dollar='\$' --sed_double_backslash="\ -- s/$_G_bs4/&\\ --/g -- s/^$_G_bs2$_G_dollar/$_G_bs&/ -- s/\\([^$_G_bs]\\)$_G_bs2$_G_dollar/\\1$_G_bs2$_G_bs$_G_dollar/g -- s/\n//g" -+ -+# func_dirname_and_basename file append nondir_replacement -+# perform func_basename and func_dirname in a single function -+# call: -+# dirname: Compute the dirname of FILE. If nonempty, -+# add APPEND to the result, otherwise set result -+# to NONDIR_REPLACEMENT. -+# value returned in "$func_dirname_result" -+# basename: Compute filename of FILE. -+# value retuned in "$func_basename_result" -+# Implementation must be kept synchronized with func_dirname -+# and func_basename. For efficiency, we do not delegate to -+# those functions but instead duplicate the functionality here. -+func_dirname_and_basename () -+{ -+ # Extract subdirectory from the argument. 
-+ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` -+ if test "X$func_dirname_result" = "X${1}"; then -+ func_dirname_result="${3}" -+ else -+ func_dirname_result="$func_dirname_result${2}" -+ fi -+ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` -+} # func_dirname_and_basename may be replaced by extended shell implementation - - --## ----------------- ## --## Global variables. ## --## ----------------- ## -- --# Except for the global variables explicitly listed below, the following --# functions in the '^func_' namespace, and the '^require_' namespace --# variables initialised in the 'Resource management' section, sourcing --# this file will not pollute your global namespace with anything --# else. There's no portable way to scope variables in Bourne shell --# though, so actually running these functions will sometimes place --# results into a variable named after the function, and often use --# temporary variables in the '^_G_' namespace. If you are careful to --# avoid using those namespaces casually in your sourcing script, things --# should continue to work as you expect. And, of course, you can freely --# overwrite any of the functions or variables defined here before --# calling anything to customize them. -+# func_stripname prefix suffix name -+# strip PREFIX and SUFFIX off of NAME. -+# PREFIX and SUFFIX must not contain globbing or regex special -+# characters, hashes, percent signs, but SUFFIX may contain a leading -+# dot (in which case that matches only a dot). -+# func_strip_suffix prefix name -+func_stripname () -+{ -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -+ esac -+} # func_stripname may be replaced by extended shell implementation - --EXIT_SUCCESS=0 --EXIT_FAILURE=1 --EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. --EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake. - --# Allow overriding, eg assuming that you follow the convention of --# putting '$debug_cmd' at the start of all your functions, you can get --# bash to show function call trace with: --# --# debug_cmd='eval echo "${FUNCNAME[0]} $*" >&2' bash your-script-name --debug_cmd=${debug_cmd-":"} --exit_cmd=: -+# These SED scripts presuppose an absolute path with a trailing slash. -+pathcar='s,^/\([^/]*\).*$,\1,' -+pathcdr='s,^/[^/]*,,' -+removedotparts=':dotsl -+ s@/\./@/@g -+ t dotsl -+ s,/\.$,/,' -+collapseslashes='s@/\{1,\}@/@g' -+finalslash='s,/*$,/,' - --# By convention, finish your script with: --# --# exit $exit_status --# --# so that you can set exit_status to non-zero if you want to indicate --# something went wrong during execution without actually bailing out at --# the point of failure. --exit_status=$EXIT_SUCCESS -+# func_normal_abspath PATH -+# Remove doubled-up and trailing slashes, "." path components, -+# and cancel out any ".." path components in PATH after making -+# it an absolute path. -+# value returned in "$func_normal_abspath_result" -+func_normal_abspath () -+{ -+ # Start from root dir and reassemble the path. -+ func_normal_abspath_result= -+ func_normal_abspath_tpath=$1 -+ func_normal_abspath_altnamespace= -+ case $func_normal_abspath_tpath in -+ "") -+ # Empty path, that just means $cwd. 
-+ func_stripname '' '/' "`pwd`" -+ func_normal_abspath_result=$func_stripname_result -+ return -+ ;; -+ # The next three entries are used to spot a run of precisely -+ # two leading slashes without using negated character classes; -+ # we take advantage of case's first-match behaviour. -+ ///*) -+ # Unusual form of absolute path, do nothing. -+ ;; -+ //*) -+ # Not necessarily an ordinary path; POSIX reserves leading '//' -+ # and for example Cygwin uses it to access remote file shares -+ # over CIFS/SMB, so we conserve a leading double slash if found. -+ func_normal_abspath_altnamespace=/ -+ ;; -+ /*) -+ # Absolute path, do nothing. -+ ;; -+ *) -+ # Relative path, prepend $cwd. -+ func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath -+ ;; -+ esac -+ # Cancel out all the simple stuff to save iterations. We also want -+ # the path to end with a slash for ease of parsing, so make sure -+ # there is one (and only one) here. -+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -+ -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"` -+ while :; do -+ # Processed it all yet? -+ if test "$func_normal_abspath_tpath" = / ; then -+ # If we ascended to the root using ".." the result may be empty now. -+ if test -z "$func_normal_abspath_result" ; then -+ func_normal_abspath_result=/ -+ fi -+ break -+ fi -+ func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \ -+ -e "$pathcar"` -+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -+ -e "$pathcdr"` -+ # Figure out what to do with it -+ case $func_normal_abspath_tcomponent in -+ "") -+ # Trailing empty path component, ignore it. -+ ;; -+ ..) -+ # Parent dir; strip last assembled component from result. -+ func_dirname "$func_normal_abspath_result" -+ func_normal_abspath_result=$func_dirname_result -+ ;; -+ *) -+ # Actual path component, append it. -+ func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent -+ ;; -+ esac -+ done -+ # Restore leading double-slash if one was found on entry. -+ func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result -+} - --# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh --# is ksh but when the shell is invoked as "sh" and the current value of --# the _XPG environment variable is not equal to 1 (one), the special --# positional parameter $0, within a function call, is the name of the --# function. --progpath=$0 -+# func_relative_path SRCDIR DSTDIR -+# generates a relative path from SRCDIR to DSTDIR, with a trailing -+# slash if non-empty, suitable for immediately appending a filename -+# without needing to append a separator. -+# value returned in "$func_relative_path_result" -+func_relative_path () -+{ -+ func_relative_path_result= -+ func_normal_abspath "$1" -+ func_relative_path_tlibdir=$func_normal_abspath_result -+ func_normal_abspath "$2" -+ func_relative_path_tbindir=$func_normal_abspath_result -+ -+ # Ascend the tree starting from libdir -+ while :; do -+ # check if we have found a prefix of bindir -+ case $func_relative_path_tbindir in -+ $func_relative_path_tlibdir) -+ # found an exact match -+ func_relative_path_tcancelled= -+ break -+ ;; -+ $func_relative_path_tlibdir*) -+ # found a matching prefix -+ func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir" -+ func_relative_path_tcancelled=$func_stripname_result -+ if test -z "$func_relative_path_result"; then -+ func_relative_path_result=. 
-+ fi -+ break -+ ;; -+ *) -+ func_dirname $func_relative_path_tlibdir -+ func_relative_path_tlibdir=${func_dirname_result} -+ if test "x$func_relative_path_tlibdir" = x ; then -+ # Have to descend all the way to the root! -+ func_relative_path_result=../$func_relative_path_result -+ func_relative_path_tcancelled=$func_relative_path_tbindir -+ break -+ fi -+ func_relative_path_result=../$func_relative_path_result -+ ;; -+ esac -+ done - --# The name of this program. --progname=`$ECHO "$progpath" |$SED "$sed_basename"` -+ # Now calculate path; take care to avoid doubling-up slashes. -+ func_stripname '' '/' "$func_relative_path_result" -+ func_relative_path_result=$func_stripname_result -+ func_stripname '/' '/' "$func_relative_path_tcancelled" -+ if test "x$func_stripname_result" != x ; then -+ func_relative_path_result=${func_relative_path_result}/${func_stripname_result} -+ fi - --# Make sure we have an absolute progpath for reexecution: -+ # Normalisation. If bindir is libdir, return empty string, -+ # else relative path ending with a slash; either way, target -+ # file name can be directly appended. -+ if test ! -z "$func_relative_path_result"; then -+ func_stripname './' '' "$func_relative_path_result/" -+ func_relative_path_result=$func_stripname_result -+ fi -+} -+ -+# The name of this program: -+func_dirname_and_basename "$progpath" -+progname=$func_basename_result -+ -+# Make sure we have an absolute path for reexecution: - case $progpath in - [\\/]*|[A-Za-z]:\\*) ;; - *[\\/]*) -- progdir=`$ECHO "$progpath" |$SED "$sed_dirname"` -+ progdir=$func_dirname_result - progdir=`cd "$progdir" && pwd` -- progpath=$progdir/$progname -+ progpath="$progdir/$progname" - ;; - *) -- _G_IFS=$IFS -+ save_IFS="$IFS" - IFS=${PATH_SEPARATOR-:} - for progdir in $PATH; do -- IFS=$_G_IFS -+ IFS="$save_IFS" - test -x "$progdir/$progname" && break - done -- IFS=$_G_IFS -+ IFS="$save_IFS" - test -n "$progdir" || progdir=`pwd` -- progpath=$progdir/$progname -+ progpath="$progdir/$progname" - ;; - esac - -+# Sed substitution that helps us do robust quoting. It backslashifies -+# metacharacters that are still active within double-quoted strings. -+Xsed="${SED}"' -e 1s/^X//' -+sed_quote_subst='s/\([`"$\\]\)/\\\1/g' - --## ----------------- ## --## Standard options. ## --## ----------------- ## -- --# The following options affect the operation of the functions defined --# below, and should be set appropriately depending on run-time para- --# meters passed on the command line. -- --opt_dry_run=false --opt_quiet=false --opt_verbose=false -- --# Categories 'all' and 'none' are always available. Append any others --# you will pass as the first argument to func_warning from your own --# code. --warning_categories= -- --# By default, display warnings according to 'opt_warning_types'. Set --# 'warning_func' to ':' to elide all warnings, or func_fatal_error to --# treat the next displayed warning as a fatal error. --warning_func=func_warn_and_continue -- --# Set to 'all' to display all warnings, 'none' to suppress all --# warnings, or a space delimited list of some subset of --# 'warning_categories' to display only the listed warnings. --opt_warning_types=all -- -- --## -------------------- ## --## Resource management. ## --## -------------------- ## -- --# This section contains definitions for functions that each ensure a --# particular resource (a file, or a non-empty configuration variable for --# example) is available, and if appropriate to extract default values --# from pertinent package files. 
Call them using their associated --# 'require_*' variable to ensure that they are executed, at most, once. --# --# It's entirely deliberate that calling these functions can set --# variables that don't obey the namespace limitations obeyed by the rest --# of this file, in order that that they be as useful as possible to --# callers. -- -- --# require_term_colors --# ------------------- --# Allow display of bold text on terminals that support it. --require_term_colors=func_require_term_colors --func_require_term_colors () --{ -- $debug_cmd -- -- test -t 1 && { -- # COLORTERM and USE_ANSI_COLORS environment variables take -- # precedence, because most terminfo databases neglect to describe -- # whether color sequences are supported. -- test -n "${COLORTERM+set}" && : ${USE_ANSI_COLORS="1"} -- -- if test 1 = "$USE_ANSI_COLORS"; then -- # Standard ANSI escape sequences -- tc_reset='[0m' -- tc_bold='[1m'; tc_standout='[7m' -- tc_red='[31m'; tc_green='[32m' -- tc_blue='[34m'; tc_cyan='[36m' -- else -- # Otherwise trust the terminfo database after all. -- test -n "`tput sgr0 2>/dev/null`" && { -- tc_reset=`tput sgr0` -- test -n "`tput bold 2>/dev/null`" && tc_bold=`tput bold` -- tc_standout=$tc_bold -- test -n "`tput smso 2>/dev/null`" && tc_standout=`tput smso` -- test -n "`tput setaf 1 2>/dev/null`" && tc_red=`tput setaf 1` -- test -n "`tput setaf 2 2>/dev/null`" && tc_green=`tput setaf 2` -- test -n "`tput setaf 4 2>/dev/null`" && tc_blue=`tput setaf 4` -- test -n "`tput setaf 5 2>/dev/null`" && tc_cyan=`tput setaf 5` -- } -- fi -- } -- -- require_term_colors=: --} -- -- --## ----------------- ## --## Function library. ## --## ----------------- ## -- --# This section contains a variety of useful functions to call in your --# scripts. Take note of the portable wrappers for features provided by --# some modern shells, which will fall back to slower equivalents on --# less featureful shells. -- -- --# func_append VAR VALUE --# --------------------- --# Append VALUE onto the existing contents of VAR. -- -- # We should try to minimise forks, especially on Windows where they are -- # unreasonably slow, so skip the feature probes when bash or zsh are -- # being used: -- if test set = "${BASH_VERSION+set}${ZSH_VERSION+set}"; then -- : ${_G_HAVE_ARITH_OP="yes"} -- : ${_G_HAVE_XSI_OPS="yes"} -- # The += operator was introduced in bash 3.1 -- case $BASH_VERSION in -- [12].* | 3.0 | 3.0*) ;; -- *) -- : ${_G_HAVE_PLUSEQ_OP="yes"} -- ;; -- esac -- fi -- -- # _G_HAVE_PLUSEQ_OP -- # Can be empty, in which case the shell is probed, "yes" if += is -- # useable or anything else if it does not work. -- test -z "$_G_HAVE_PLUSEQ_OP" \ -- && (eval 'x=a; x+=" b"; test "a b" = "$x"') 2>/dev/null \ -- && _G_HAVE_PLUSEQ_OP=yes -- --if test yes = "$_G_HAVE_PLUSEQ_OP" --then -- # This is an XSI compatible shell, allowing a faster implementation... -- eval 'func_append () -- { -- $debug_cmd -- -- eval "$1+=\$2" -- }' --else -- # ...otherwise fall back to using expr, which is often a shell builtin. -- func_append () -- { -- $debug_cmd -- -- eval "$1=\$$1\$2" -- } --fi -- -- --# func_append_quoted VAR VALUE --# ---------------------------- --# Quote VALUE and append to the end of shell variable VAR, separated --# by a space. 
--if test yes = "$_G_HAVE_PLUSEQ_OP"; then -- eval 'func_append_quoted () -- { -- $debug_cmd -- -- func_quote_for_eval "$2" -- eval "$1+=\\ \$func_quote_for_eval_result" -- }' --else -- func_append_quoted () -- { -- $debug_cmd -- -- func_quote_for_eval "$2" -- eval "$1=\$$1\\ \$func_quote_for_eval_result" -- } --fi -- -- --# func_append_uniq VAR VALUE --# -------------------------- --# Append unique VALUE onto the existing contents of VAR, assuming --# entries are delimited by the first character of VALUE. For example: --# --# func_append_uniq options " --another-option option-argument" --# --# will only append to $options if " --another-option option-argument " --# is not already present somewhere in $options already (note spaces at --# each end implied by leading space in second argument). --func_append_uniq () --{ -- $debug_cmd -- -- eval _G_current_value='`$ECHO $'$1'`' -- _G_delim=`expr "$2" : '\(.\)'` -- -- case $_G_delim$_G_current_value$_G_delim in -- *"$2$_G_delim"*) ;; -- *) func_append "$@" ;; -- esac --} -- -- --# func_arith TERM... --# ------------------ --# Set func_arith_result to the result of evaluating TERMs. -- test -z "$_G_HAVE_ARITH_OP" \ -- && (eval 'test 2 = $(( 1 + 1 ))') 2>/dev/null \ -- && _G_HAVE_ARITH_OP=yes -- --if test yes = "$_G_HAVE_ARITH_OP"; then -- eval 'func_arith () -- { -- $debug_cmd -- -- func_arith_result=$(( $* )) -- }' --else -- func_arith () -- { -- $debug_cmd -- -- func_arith_result=`expr "$@"` -- } --fi -- -- --# func_basename FILE --# ------------------ --# Set func_basename_result to FILE with everything up to and including --# the last / stripped. --if test yes = "$_G_HAVE_XSI_OPS"; then -- # If this shell supports suffix pattern removal, then use it to avoid -- # forking. Hide the definitions single quotes in case the shell chokes -- # on unsupported syntax... -- _b='func_basename_result=${1##*/}' -- _d='case $1 in -- */*) func_dirname_result=${1%/*}$2 ;; -- * ) func_dirname_result=$3 ;; -- esac' -- --else -- # ...otherwise fall back to using sed. -- _b='func_basename_result=`$ECHO "$1" |$SED "$sed_basename"`' -- _d='func_dirname_result=`$ECHO "$1" |$SED "$sed_dirname"` -- if test "X$func_dirname_result" = "X$1"; then -- func_dirname_result=$3 -- else -- func_append func_dirname_result "$2" -- fi' --fi -- --eval 'func_basename () --{ -- $debug_cmd -- -- '"$_b"' --}' -- -- --# func_dirname FILE APPEND NONDIR_REPLACEMENT --# ------------------------------------------- --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --eval 'func_dirname () --{ -- $debug_cmd -- -- '"$_d"' --}' -+# Same as above, but do not quote variable references. -+double_quote_subst='s/\(["`\\]\)/\\\1/g' - -+# Sed substitution that turns a string into a regex matching for the -+# string literally. -+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g' - --# func_dirname_and_basename FILE APPEND NONDIR_REPLACEMENT --# -------------------------------------------------------- --# Perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# For efficiency, we do not delegate to the functions above but instead --# duplicate the functionality here. 
--eval 'func_dirname_and_basename () --{ -- $debug_cmd -+# Sed substitution that converts a w32 file name or path -+# which contains forward slashes, into one that contains -+# (escaped) backslashes. A very naive implementation. -+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' - -- '"$_b"' -- '"$_d"' --}' -+# Re-`\' parameter expansions in output of double_quote_subst that were -+# `\'-ed in input to the same. If an odd number of `\' preceded a '$' -+# in input to double_quote_subst, that '$' was protected from expansion. -+# Since each input `\' is now two `\'s, look for any number of runs of -+# four `\'s followed by two `\'s and then a '$'. `\' that '$'. -+bs='\\' -+bs2='\\\\' -+bs4='\\\\\\\\' -+dollar='\$' -+sed_double_backslash="\ -+ s/$bs4/&\\ -+/g -+ s/^$bs2$dollar/$bs&/ -+ s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g -+ s/\n//g" - -+# Standard options: -+opt_dry_run=false -+opt_help=false -+opt_quiet=false -+opt_verbose=false -+opt_warning=: - --# func_echo ARG... --# ---------------- --# Echo program name prefixed message. -+# func_echo arg... -+# Echo program name prefixed message, along with the current mode -+# name if it has been set yet. - func_echo () - { -- $debug_cmd -+ $ECHO "$progname: ${opt_mode+$opt_mode: }$*" -+} - -- _G_message=$* -+# func_verbose arg... -+# Echo program name prefixed message in verbose mode only. -+func_verbose () -+{ -+ $opt_verbose && func_echo ${1+"$@"} - -- func_echo_IFS=$IFS -- IFS=$nl -- for _G_line in $_G_message; do -- IFS=$func_echo_IFS -- $ECHO "$progname: $_G_line" -- done -- IFS=$func_echo_IFS -+ # A bug in bash halts the script if the last line of a function -+ # fails when set -e is in force, so we need another command to -+ # work around that: -+ : - } - -- --# func_echo_all ARG... --# -------------------- -+# func_echo_all arg... - # Invoke $ECHO with all args, space-separated. - func_echo_all () - { - $ECHO "$*" - } - -- --# func_echo_infix_1 INFIX ARG... --# ------------------------------ --# Echo program name, followed by INFIX on the first line, with any --# additional lines not showing INFIX. --func_echo_infix_1 () --{ -- $debug_cmd -- -- $require_term_colors -- -- _G_infix=$1; shift -- _G_indent=$_G_infix -- _G_prefix="$progname: $_G_infix: " -- _G_message=$* -- -- # Strip color escape sequences before counting printable length -- for _G_tc in "$tc_reset" "$tc_bold" "$tc_standout" "$tc_red" "$tc_green" "$tc_blue" "$tc_cyan" -- do -- test -n "$_G_tc" && { -- _G_esc_tc=`$ECHO "$_G_tc" | $SED "$sed_make_literal_regex"` -- _G_indent=`$ECHO "$_G_indent" | $SED "s|$_G_esc_tc||g"` -- } -- done -- _G_indent="$progname: "`echo "$_G_indent" | $SED 's|.| |g'`" " ## exclude from sc_prohibit_nested_quotes -- -- func_echo_infix_1_IFS=$IFS -- IFS=$nl -- for _G_line in $_G_message; do -- IFS=$func_echo_infix_1_IFS -- $ECHO "$_G_prefix$tc_bold$_G_line$tc_reset" >&2 -- _G_prefix=$_G_indent -- done -- IFS=$func_echo_infix_1_IFS --} -- -- --# func_error ARG... --# ----------------- -+# func_error arg... - # Echo program name prefixed message to standard error. - func_error () - { -- $debug_cmd -+ $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2 -+} - -- $require_term_colors -+# func_warning arg... -+# Echo program name prefixed warning message to standard error. -+func_warning () -+{ -+ $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2 - -- func_echo_infix_1 " $tc_standout${tc_red}error$tc_reset" "$*" >&2 -+ # bash bug again: -+ : - } - -- --# func_fatal_error ARG... 
--# ----------------------- -+# func_fatal_error arg... - # Echo program name prefixed message to standard error, and exit. - func_fatal_error () - { -- $debug_cmd -- -- func_error "$*" -+ func_error ${1+"$@"} - exit $EXIT_FAILURE - } - -+# func_fatal_help arg... -+# Echo program name prefixed message to standard error, followed by -+# a help hint, and exit. -+func_fatal_help () -+{ -+ func_error ${1+"$@"} -+ func_fatal_error "$help" -+} -+help="Try \`$progname --help' for more information." ## default -+ - --# func_grep EXPRESSION FILENAME --# ----------------------------- -+# func_grep expression filename - # Check whether EXPRESSION matches any line of FILENAME, without output. - func_grep () - { -- $debug_cmd -- - $GREP "$1" "$2" >/dev/null 2>&1 - } - - --# func_len STRING --# --------------- --# Set func_len_result to the length of STRING. STRING may not --# start with a hyphen. -- test -z "$_G_HAVE_XSI_OPS" \ -- && (eval 'x=a/b/c; -- test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \ -- && _G_HAVE_XSI_OPS=yes -- --if test yes = "$_G_HAVE_XSI_OPS"; then -- eval 'func_len () -- { -- $debug_cmd -- -- func_len_result=${#1} -- }' --else -- func_len () -- { -- $debug_cmd -- -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` -- } --fi -- -- --# func_mkdir_p DIRECTORY-PATH --# --------------------------- -+# func_mkdir_p directory-path - # Make sure the entire path to DIRECTORY-PATH is available. - func_mkdir_p () - { -- $debug_cmd -- -- _G_directory_path=$1 -- _G_dir_list= -+ my_directory_path="$1" -+ my_dir_list= - -- if test -n "$_G_directory_path" && test : != "$opt_dry_run"; then -+ if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then - -- # Protect directory names starting with '-' -- case $_G_directory_path in -- -*) _G_directory_path=./$_G_directory_path ;; -+ # Protect directory names starting with `-' -+ case $my_directory_path in -+ -*) my_directory_path="./$my_directory_path" ;; - esac - - # While some portion of DIR does not yet exist... -- while test ! -d "$_G_directory_path"; do -+ while test ! -d "$my_directory_path"; do - # ...make a list in topmost first order. Use a colon delimited - # list incase some portion of path contains whitespace. -- _G_dir_list=$_G_directory_path:$_G_dir_list -+ my_dir_list="$my_directory_path:$my_dir_list" - - # If the last portion added has no slash in it, the list is done -- case $_G_directory_path in */*) ;; *) break ;; esac -+ case $my_directory_path in */*) ;; *) break ;; esac - - # ...otherwise throw away the child directory and loop -- _G_directory_path=`$ECHO "$_G_directory_path" | $SED -e "$sed_dirname"` -+ my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"` - done -- _G_dir_list=`$ECHO "$_G_dir_list" | $SED 's|:*$||'` -+ my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'` - -- func_mkdir_p_IFS=$IFS; IFS=: -- for _G_dir in $_G_dir_list; do -- IFS=$func_mkdir_p_IFS -- # mkdir can fail with a 'File exist' error if two processes -+ save_mkdir_p_IFS="$IFS"; IFS=':' -+ for my_dir in $my_dir_list; do -+ IFS="$save_mkdir_p_IFS" -+ # mkdir can fail with a `File exist' error if two processes - # try to create one of the directories concurrently. Don't - # stop in that case! -- $MKDIR "$_G_dir" 2>/dev/null || : -+ $MKDIR "$my_dir" 2>/dev/null || : - done -- IFS=$func_mkdir_p_IFS -+ IFS="$save_mkdir_p_IFS" - - # Bail out if we (or some other process) failed to create a directory. 
-- test -d "$_G_directory_path" || \ -- func_fatal_error "Failed to create '$1'" -+ test -d "$my_directory_path" || \ -+ func_fatal_error "Failed to create \`$1'" - fi - } - - --# func_mktempdir [BASENAME] --# ------------------------- -+# func_mktempdir [string] - # Make a temporary directory that won't clash with other running - # libtool processes, and avoids race conditions if possible. If --# given, BASENAME is the basename for that directory. -+# given, STRING is the basename for that directory. - func_mktempdir () - { -- $debug_cmd -+ my_template="${TMPDIR-/tmp}/${1-$progname}" - -- _G_template=${TMPDIR-/tmp}/${1-$progname} -- -- if test : = "$opt_dry_run"; then -+ if test "$opt_dry_run" = ":"; then - # Return a directory name, but don't create it in dry-run mode -- _G_tmpdir=$_G_template-$$ -+ my_tmpdir="${my_template}-$$" - else - - # If mktemp works, use that first and foremost -- _G_tmpdir=`mktemp -d "$_G_template-XXXXXXXX" 2>/dev/null` -+ my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` - -- if test ! -d "$_G_tmpdir"; then -+ if test ! -d "$my_tmpdir"; then - # Failing that, at least try and use $RANDOM to avoid a race -- _G_tmpdir=$_G_template-${RANDOM-0}$$ -+ my_tmpdir="${my_template}-${RANDOM-0}$$" - -- func_mktempdir_umask=`umask` -+ save_mktempdir_umask=`umask` - umask 0077 -- $MKDIR "$_G_tmpdir" -- umask $func_mktempdir_umask -+ $MKDIR "$my_tmpdir" -+ umask $save_mktempdir_umask - fi - - # If we're not in dry-run mode, bomb out on failure -- test -d "$_G_tmpdir" || \ -- func_fatal_error "cannot create temporary directory '$_G_tmpdir'" -+ test -d "$my_tmpdir" || \ -+ func_fatal_error "cannot create temporary directory \`$my_tmpdir'" - fi - -- $ECHO "$_G_tmpdir" -+ $ECHO "$my_tmpdir" - } - - --# func_normal_abspath PATH --# ------------------------ --# Remove doubled-up and trailing slashes, "." path components, --# and cancel out any ".." path components in PATH after making --# it an absolute path. --func_normal_abspath () -+# func_quote_for_eval arg -+# Aesthetically quote ARG to be evaled later. -+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT -+# is double-quoted, suitable for a subsequent eval, whereas -+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters -+# which are still active within double quotes backslashified. -+func_quote_for_eval () - { -- $debug_cmd -+ case $1 in -+ *[\\\`\"\$]*) -+ func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;; -+ *) -+ func_quote_for_eval_unquoted_result="$1" ;; -+ esac - -- # These SED scripts presuppose an absolute path with a trailing slash. -- _G_pathcar='s|^/\([^/]*\).*$|\1|' -- _G_pathcdr='s|^/[^/]*||' -- _G_removedotparts=':dotsl -- s|/\./|/|g -- t dotsl -- s|/\.$|/|' -- _G_collapseslashes='s|/\{1,\}|/|g' -- _G_finalslash='s|/*$|/|' -- -- # Start from root dir and reassemble the path. -- func_normal_abspath_result= -- func_normal_abspath_tpath=$1 -- func_normal_abspath_altnamespace= -- case $func_normal_abspath_tpath in -- "") -- # Empty path, that just means $cwd. -- func_stripname '' '/' "`pwd`" -- func_normal_abspath_result=$func_stripname_result -- return -- ;; -- # The next three entries are used to spot a run of precisely -- # two leading slashes without using negated character classes; -- # we take advantage of case's first-match behaviour. -- ///*) -- # Unusual form of absolute path, do nothing. 
-- ;; -- //*) -- # Not necessarily an ordinary path; POSIX reserves leading '//' -- # and for example Cygwin uses it to access remote file shares -- # over CIFS/SMB, so we conserve a leading double slash if found. -- func_normal_abspath_altnamespace=/ -- ;; -- /*) -- # Absolute path, do nothing. -+ case $func_quote_for_eval_unquoted_result in -+ # Double-quote args containing shell metacharacters to delay -+ # word splitting, command substitution and and variable -+ # expansion for a subsequent eval. -+ # Many Bourne shells cannot handle close brackets correctly -+ # in scan sets, so we specify it separately. -+ *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") -+ func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\"" - ;; - *) -- # Relative path, prepend $cwd. -- func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath -- ;; -+ func_quote_for_eval_result="$func_quote_for_eval_unquoted_result" - esac -- -- # Cancel out all the simple stuff to save iterations. We also want -- # the path to end with a slash for ease of parsing, so make sure -- # there is one (and only one) here. -- func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -- -e "$_G_removedotparts" -e "$_G_collapseslashes" -e "$_G_finalslash"` -- while :; do -- # Processed it all yet? -- if test / = "$func_normal_abspath_tpath"; then -- # If we ascended to the root using ".." the result may be empty now. -- if test -z "$func_normal_abspath_result"; then -- func_normal_abspath_result=/ -- fi -- break -- fi -- func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \ -- -e "$_G_pathcar"` -- func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -- -e "$_G_pathcdr"` -- # Figure out what to do with it -- case $func_normal_abspath_tcomponent in -- "") -- # Trailing empty path component, ignore it. -- ;; -- ..) -- # Parent dir; strip last assembled component from result. -- func_dirname "$func_normal_abspath_result" -- func_normal_abspath_result=$func_dirname_result -- ;; -- *) -- # Actual path component, append it. -- func_append func_normal_abspath_result "/$func_normal_abspath_tcomponent" -- ;; -- esac -- done -- # Restore leading double-slash if one was found on entry. -- func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result --} -- -- --# func_notquiet ARG... --# -------------------- --# Echo program name prefixed message only when not in quiet mode. --func_notquiet () --{ -- $debug_cmd -- -- $opt_quiet || func_echo ${1+"$@"} -- -- # A bug in bash halts the script if the last line of a function -- # fails when set -e is in force, so we need another command to -- # work around that: -- : --} -- -- --# func_relative_path SRCDIR DSTDIR --# -------------------------------- --# Set func_relative_path_result to the relative path from SRCDIR to DSTDIR. 
--func_relative_path () --{ -- $debug_cmd -- -- func_relative_path_result= -- func_normal_abspath "$1" -- func_relative_path_tlibdir=$func_normal_abspath_result -- func_normal_abspath "$2" -- func_relative_path_tbindir=$func_normal_abspath_result -- -- # Ascend the tree starting from libdir -- while :; do -- # check if we have found a prefix of bindir -- case $func_relative_path_tbindir in -- $func_relative_path_tlibdir) -- # found an exact match -- func_relative_path_tcancelled= -- break -- ;; -- $func_relative_path_tlibdir*) -- # found a matching prefix -- func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir" -- func_relative_path_tcancelled=$func_stripname_result -- if test -z "$func_relative_path_result"; then -- func_relative_path_result=. -- fi -- break -- ;; -- *) -- func_dirname $func_relative_path_tlibdir -- func_relative_path_tlibdir=$func_dirname_result -- if test -z "$func_relative_path_tlibdir"; then -- # Have to descend all the way to the root! -- func_relative_path_result=../$func_relative_path_result -- func_relative_path_tcancelled=$func_relative_path_tbindir -- break -- fi -- func_relative_path_result=../$func_relative_path_result -- ;; -- esac -- done -- -- # Now calculate path; take care to avoid doubling-up slashes. -- func_stripname '' '/' "$func_relative_path_result" -- func_relative_path_result=$func_stripname_result -- func_stripname '/' '/' "$func_relative_path_tcancelled" -- if test -n "$func_stripname_result"; then -- func_append func_relative_path_result "/$func_stripname_result" -- fi -- -- # Normalisation. If bindir is libdir, return '.' else relative path. -- if test -n "$func_relative_path_result"; then -- func_stripname './' '' "$func_relative_path_result" -- func_relative_path_result=$func_stripname_result -- fi -- -- test -n "$func_relative_path_result" || func_relative_path_result=. -- -- : - } - - --# func_quote_for_eval ARG... --# -------------------------- --# Aesthetically quote ARGs to be evaled later. --# This function returns two values: --# i) func_quote_for_eval_result --# double-quoted, suitable for a subsequent eval --# ii) func_quote_for_eval_unquoted_result --# has all characters that are still active within double --# quotes backslashified. --func_quote_for_eval () --{ -- $debug_cmd -- -- func_quote_for_eval_unquoted_result= -- func_quote_for_eval_result= -- while test 0 -lt $#; do -- case $1 in -- *[\\\`\"\$]*) -- _G_unquoted_arg=`printf '%s\n' "$1" |$SED "$sed_quote_subst"` ;; -- *) -- _G_unquoted_arg=$1 ;; -- esac -- if test -n "$func_quote_for_eval_unquoted_result"; then -- func_append func_quote_for_eval_unquoted_result " $_G_unquoted_arg" -- else -- func_append func_quote_for_eval_unquoted_result "$_G_unquoted_arg" -- fi -- -- case $_G_unquoted_arg in -- # Double-quote args containing shell metacharacters to delay -- # word splitting, command substitution and variable expansion -- # for a subsequent eval. -- # Many Bourne shells cannot handle close brackets correctly -- # in scan sets, so we specify it separately. 
-- *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") -- _G_quoted_arg=\"$_G_unquoted_arg\" -- ;; -- *) -- _G_quoted_arg=$_G_unquoted_arg -- ;; -- esac -- -- if test -n "$func_quote_for_eval_result"; then -- func_append func_quote_for_eval_result " $_G_quoted_arg" -- else -- func_append func_quote_for_eval_result "$_G_quoted_arg" -- fi -- shift -- done --} -- -- --# func_quote_for_expand ARG --# ------------------------- -+# func_quote_for_expand arg - # Aesthetically quote ARG to be evaled later; same as above, - # but do not quote variable references. - func_quote_for_expand () - { -- $debug_cmd -- - case $1 in - *[\\\`\"]*) -- _G_arg=`$ECHO "$1" | $SED \ -- -e "$sed_double_quote_subst" -e "$sed_double_backslash"` ;; -+ my_arg=`$ECHO "$1" | $SED \ -+ -e "$double_quote_subst" -e "$sed_double_backslash"` ;; - *) -- _G_arg=$1 ;; -+ my_arg="$1" ;; - esac - -- case $_G_arg in -+ case $my_arg in - # Double-quote args containing shell metacharacters to delay - # word splitting and command substitution for a subsequent eval. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, so we specify it separately. - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") -- _G_arg=\"$_G_arg\" -+ my_arg="\"$my_arg\"" - ;; - esac - -- func_quote_for_expand_result=$_G_arg -+ func_quote_for_expand_result="$my_arg" - } - - --# func_stripname PREFIX SUFFIX NAME --# --------------------------------- --# strip PREFIX and SUFFIX from NAME, and store in func_stripname_result. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --if test yes = "$_G_HAVE_XSI_OPS"; then -- eval 'func_stripname () -- { -- $debug_cmd -- -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary variable first. -- func_stripname_result=$3 -- func_stripname_result=${func_stripname_result#"$1"} -- func_stripname_result=${func_stripname_result%"$2"} -- }' --else -- func_stripname () -- { -- $debug_cmd -- -- case $2 in -- .*) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%\\\\$2\$%%"`;; -- *) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%$2\$%%"`;; -- esac -- } --fi -- -- --# func_show_eval CMD [FAIL_EXP] --# ----------------------------- --# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is -+# func_show_eval cmd [fail_exp] -+# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is - # not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP - # is given, then evaluate it. - func_show_eval () - { -- $debug_cmd -- -- _G_cmd=$1 -- _G_fail_exp=${2-':'} -+ my_cmd="$1" -+ my_fail_exp="${2-:}" - -- func_quote_for_expand "$_G_cmd" -- eval "func_notquiet $func_quote_for_expand_result" -+ ${opt_silent-false} || { -+ func_quote_for_expand "$my_cmd" -+ eval "func_echo $func_quote_for_expand_result" -+ } - -- $opt_dry_run || { -- eval "$_G_cmd" -- _G_status=$? -- if test 0 -ne "$_G_status"; then -- eval "(exit $_G_status); $_G_fail_exp" -+ if ${opt_dry_run-false}; then :; else -+ eval "$my_cmd" -+ my_status=$? -+ if test "$my_status" -eq 0; then :; else -+ eval "(exit $my_status); $my_fail_exp" - fi -- } -+ fi - } - - --# func_show_eval_locale CMD [FAIL_EXP] --# ------------------------------------ --# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is -+# func_show_eval_locale cmd [fail_exp] -+# Unless opt_silent is true, then output CMD. 
Then, if opt_dryrun is - # not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP - # is given, then evaluate it. Use the saved locale for evaluation. - func_show_eval_locale () - { -- $debug_cmd -- -- _G_cmd=$1 -- _G_fail_exp=${2-':'} -+ my_cmd="$1" -+ my_fail_exp="${2-:}" - -- $opt_quiet || { -- func_quote_for_expand "$_G_cmd" -+ ${opt_silent-false} || { -+ func_quote_for_expand "$my_cmd" - eval "func_echo $func_quote_for_expand_result" - } - -- $opt_dry_run || { -- eval "$_G_user_locale -- $_G_cmd" -- _G_status=$? -- eval "$_G_safe_locale" -- if test 0 -ne "$_G_status"; then -- eval "(exit $_G_status); $_G_fail_exp" -+ if ${opt_dry_run-false}; then :; else -+ eval "$lt_user_locale -+ $my_cmd" -+ my_status=$? -+ eval "$lt_safe_locale" -+ if test "$my_status" -eq 0; then :; else -+ eval "(exit $my_status); $my_fail_exp" - fi -- } -+ fi - } - -- - # func_tr_sh --# ---------- - # Turn $1 into a string suitable for a shell variable name. - # Result is stored in $func_tr_sh_result. All characters - # not in the set a-zA-Z0-9_ are replaced with '_'. Further, - # if $1 begins with a digit, a '_' is prepended as well. - func_tr_sh () - { -- $debug_cmd -- -- case $1 in -- [0-9]* | *[!a-zA-Z0-9_]*) -- func_tr_sh_result=`$ECHO "$1" | $SED -e 's/^\([0-9]\)/_\1/' -e 's/[^a-zA-Z0-9_]/_/g'` -- ;; -- * ) -- func_tr_sh_result=$1 -- ;; -- esac --} -- -- --# func_verbose ARG... --# ------------------- --# Echo program name prefixed message in verbose mode only. --func_verbose () --{ -- $debug_cmd -- -- $opt_verbose && func_echo "$*" -- -- : --} -- -- --# func_warn_and_continue ARG... --# ----------------------------- --# Echo program name prefixed warning message to standard error. --func_warn_and_continue () --{ -- $debug_cmd -- -- $require_term_colors -- -- func_echo_infix_1 "${tc_red}warning$tc_reset" "$*" >&2 --} -- -- --# func_warning CATEGORY ARG... --# ---------------------------- --# Echo program name prefixed warning message to standard error. Warning --# messages can be filtered according to CATEGORY, where this function --# elides messages where CATEGORY is not listed in the global variable --# 'opt_warning_types'. --func_warning () --{ -- $debug_cmd -- -- # CATEGORY must be in the warning_categories list! -- case " $warning_categories " in -- *" $1 "*) ;; -- *) func_internal_error "invalid warning category '$1'" ;; -- esac -- -- _G_category=$1 -- shift -- -- case " $opt_warning_types " in -- *" $_G_category "*) $warning_func ${1+"$@"} ;; -- esac --} -- -- --# func_sort_ver VER1 VER2 --# ----------------------- --# 'sort -V' is not generally available. --# Note this deviates from the version comparison in automake --# in that it treats 1.5 < 1.5.0, and treats 1.4.4a < 1.4-p3a --# but this should suffice as we won't be specifying old --# version formats or redundant trailing .0 in bootstrap.conf. --# If we did want full compatibility then we should probably --# use m4_version_compare from autoconf. --func_sort_ver () --{ -- $debug_cmd -- -- printf '%s\n%s\n' "$1" "$2" \ -- | sort -t. -k 1,1n -k 2,2n -k 3,3n -k 4,4n -k 5,5n -k 6,6n -k 7,7n -k 8,8n -k 9,9n --} -- --# func_lt_ver PREV CURR --# --------------------- --# Return true if PREV and CURR are in the correct order according to --# func_sort_ver, otherwise false. Use it like this: --# --# func_lt_ver "$prev_ver" "$proposed_ver" || func_fatal_error "..." 
--func_lt_ver () --{ -- $debug_cmd -- -- test "x$1" = x`func_sort_ver "$1" "$2" | $SED 1q` --} -- -- --# Local variables: --# mode: shell-script --# sh-indentation: 2 --# eval: (add-hook 'before-save-hook 'time-stamp) --# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC" --# time-stamp-time-zone: "UTC" --# End: --#! /bin/sh -- --# Set a version string for this script. --scriptversion=2014-01-07.03; # UTC -- --# A portable, pluggable option parser for Bourne shell. --# Written by Gary V. Vaughan, 2010 -- --# Copyright (C) 2010-2015 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# This program is free software: you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation, either version 3 of the License, or --# (at your option) any later version. -- --# This program is distributed in the hope that it will be useful, --# but WITHOUT ANY WARRANTY; without even the implied warranty of --# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --# GNU General Public License for more details. -- --# You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -- --# Please report bugs or propose patches to gary@gnu.org. -- -- --## ------ ## --## Usage. ## --## ------ ## -- --# This file is a library for parsing options in your shell scripts along --# with assorted other useful supporting features that you can make use --# of too. --# --# For the simplest scripts you might need only: --# --# #!/bin/sh --# . relative/path/to/funclib.sh --# . relative/path/to/options-parser --# scriptversion=1.0 --# func_options ${1+"$@"} --# eval set dummy "$func_options_result"; shift --# ...rest of your script... --# --# In order for the '--version' option to work, you will need to have a --# suitably formatted comment like the one at the top of this file --# starting with '# Written by ' and ending with '# warranty; '. --# --# For '-h' and '--help' to work, you will also need a one line --# description of your script's purpose in a comment directly above the --# '# Written by ' line, like the one at the top of this file. --# --# The default options also support '--debug', which will turn on shell --# execution tracing (see the comment above debug_cmd below for another --# use), and '--verbose' and the func_verbose function to allow your script --# to display verbose messages only when your user has specified --# '--verbose'. --# --# After sourcing this file, you can plug processing for additional --# options by amending the variables from the 'Configuration' section --# below, and following the instructions in the 'Option parsing' --# section further down. -- --## -------------- ## --## Configuration. ## --## -------------- ## -- --# You should override these variables in your script after sourcing this --# file so that they reflect the customisations you have added to the --# option parser. -- --# The usage line for option parsing errors and the start of '-h' and --# '--help' output messages. You can embed shell variables for delayed --# expansion at the time the message is displayed, but you will need to --# quote other shell meta-characters carefully to prevent them being --# expanded when the contents are evaled. --usage='$progpath [OPTION]...' 
-- --# Short help message in response to '-h' and '--help'. Add to this or --# override it after sourcing this library to reflect the full set of --# options your script accepts. --usage_message="\ -- --debug enable verbose shell tracing -- -W, --warnings=CATEGORY -- report the warnings falling in CATEGORY [all] -- -v, --verbose verbosely report processing -- --version print version information and exit -- -h, --help print short or long help message and exit --" -- --# Additional text appended to 'usage_message' in response to '--help'. --long_help_message=" --Warning categories include: -- 'all' show all warnings -- 'none' turn off all the warnings -- 'error' warnings are treated as fatal errors" -- --# Help message printed before fatal option parsing errors. --fatal_help="Try '\$progname --help' for more information." -- -- -- --## ------------------------- ## --## Hook function management. ## --## ------------------------- ## -- --# This section contains functions for adding, removing, and running hooks --# to the main code. A hook is just a named list of of function, that can --# be run in order later on. -- --# func_hookable FUNC_NAME --# ----------------------- --# Declare that FUNC_NAME will run hooks added with --# 'func_add_hook FUNC_NAME ...'. --func_hookable () --{ -- $debug_cmd -- -- func_append hookable_fns " $1" --} -- -- --# func_add_hook FUNC_NAME HOOK_FUNC --# --------------------------------- --# Request that FUNC_NAME call HOOK_FUNC before it returns. FUNC_NAME must --# first have been declared "hookable" by a call to 'func_hookable'. --func_add_hook () --{ -- $debug_cmd -- -- case " $hookable_fns " in -- *" $1 "*) ;; -- *) func_fatal_error "'$1' does not accept hook functions." ;; -- esac -- -- eval func_append ${1}_hooks '" $2"' --} -- -- --# func_remove_hook FUNC_NAME HOOK_FUNC --# ------------------------------------ --# Remove HOOK_FUNC from the list of functions called by FUNC_NAME. --func_remove_hook () --{ -- $debug_cmd -- -- eval ${1}_hooks='`$ECHO "\$'$1'_hooks" |$SED "s| '$2'||"`' --} -- -- --# func_run_hooks FUNC_NAME [ARG]... --# --------------------------------- --# Run all hook functions registered to FUNC_NAME. --# It is assumed that the list of hook functions contains nothing more --# than a whitespace-delimited list of legal shell function names, and --# no effort is wasted trying to catch shell meta-characters or preserve --# whitespace. --func_run_hooks () --{ -- $debug_cmd -- -- case " $hookable_fns " in -- *" $1 "*) ;; -- *) func_fatal_error "'$1' does not support hook funcions.n" ;; -- esac -- -- eval _G_hook_fns=\$$1_hooks; shift -- -- for _G_hook in $_G_hook_fns; do -- eval $_G_hook '"$@"' -- -- # store returned options list back into positional -- # parameters for next 'cmd' execution. -- eval _G_hook_result=\$${_G_hook}_result -- eval set dummy "$_G_hook_result"; shift -- done -- -- func_quote_for_eval ${1+"$@"} -- func_run_hooks_result=$func_quote_for_eval_result --} -- -- -- --## --------------- ## --## Option parsing. ## --## --------------- ## -- --# In order to add your own option parsing hooks, you must accept the --# full positional parameter list in your hook function, remove any --# options that you action, and then pass back the remaining unprocessed --# options in '<hooked_function_name>_result', escaped suitably for --# 'eval'. Like this: --# --# my_options_prep () --# { --# $debug_cmd --# --# # Extend the existing usage message. 
--# usage_message=$usage_message' --# -s, --silent don'\''t print informational messages --# ' --# --# func_quote_for_eval ${1+"$@"} --# my_options_prep_result=$func_quote_for_eval_result --# } --# func_add_hook func_options_prep my_options_prep --# --# --# my_silent_option () --# { --# $debug_cmd --# --# # Note that for efficiency, we parse as many options as we can --# # recognise in a loop before passing the remainder back to the --# # caller on the first unrecognised argument we encounter. --# while test $# -gt 0; do --# opt=$1; shift --# case $opt in --# --silent|-s) opt_silent=: ;; --# # Separate non-argument short options: --# -s*) func_split_short_opt "$_G_opt" --# set dummy "$func_split_short_opt_name" \ --# "-$func_split_short_opt_arg" ${1+"$@"} --# shift --# ;; --# *) set dummy "$_G_opt" "$*"; shift; break ;; --# esac --# done --# --# func_quote_for_eval ${1+"$@"} --# my_silent_option_result=$func_quote_for_eval_result --# } --# func_add_hook func_parse_options my_silent_option --# --# --# my_option_validation () --# { --# $debug_cmd --# --# $opt_silent && $opt_verbose && func_fatal_help "\ --# '--silent' and '--verbose' options are mutually exclusive." --# --# func_quote_for_eval ${1+"$@"} --# my_option_validation_result=$func_quote_for_eval_result --# } --# func_add_hook func_validate_options my_option_validation --# --# You'll alse need to manually amend $usage_message to reflect the extra --# options you parse. It's preferable to append if you can, so that --# multiple option parsing hooks can be added safely. -- -- --# func_options [ARG]... --# --------------------- --# All the functions called inside func_options are hookable. See the --# individual implementations for details. --func_hookable func_options --func_options () --{ -- $debug_cmd -- -- func_options_prep ${1+"$@"} -- eval func_parse_options \ -- ${func_options_prep_result+"$func_options_prep_result"} -- eval func_validate_options \ -- ${func_parse_options_result+"$func_parse_options_result"} -- -- eval func_run_hooks func_options \ -- ${func_validate_options_result+"$func_validate_options_result"} -- -- # save modified positional parameters for caller -- func_options_result=$func_run_hooks_result --} -- -- --# func_options_prep [ARG]... --# -------------------------- --# All initialisations required before starting the option parse loop. --# Note that when calling hook functions, we pass through the list of --# positional parameters. If a hook function modifies that list, and --# needs to propogate that back to rest of this script, then the complete --# modified list must be put in 'func_run_hooks_result' before --# returning. --func_hookable func_options_prep --func_options_prep () --{ -- $debug_cmd -- -- # Option defaults: -- opt_verbose=false -- opt_warning_types= -- -- func_run_hooks func_options_prep ${1+"$@"} -- -- # save modified positional parameters for caller -- func_options_prep_result=$func_run_hooks_result --} -- -- --# func_parse_options [ARG]... --# --------------------------- --# The main option parsing loop. --func_hookable func_parse_options --func_parse_options () --{ -- $debug_cmd -- -- func_parse_options_result= -- -- # this just eases exit handling -- while test $# -gt 0; do -- # Defer to hook functions for initial option parsing, so they -- # get priority in the event of reusing an option name. 
-- func_run_hooks func_parse_options ${1+"$@"} -- -- # Adjust func_parse_options positional parameters to match -- eval set dummy "$func_run_hooks_result"; shift -- -- # Break out of the loop if we already parsed every option. -- test $# -gt 0 || break -- -- _G_opt=$1 -- shift -- case $_G_opt in -- --debug|-x) debug_cmd='set -x' -- func_echo "enabling shell trace mode" -- $debug_cmd -- ;; -- -- --no-warnings|--no-warning|--no-warn) -- set dummy --warnings none ${1+"$@"} -- shift -- ;; -- -- --warnings|--warning|-W) -- test $# = 0 && func_missing_arg $_G_opt && break -- case " $warning_categories $1" in -- *" $1 "*) -- # trailing space prevents matching last $1 above -- func_append_uniq opt_warning_types " $1" -- ;; -- *all) -- opt_warning_types=$warning_categories -- ;; -- *none) -- opt_warning_types=none -- warning_func=: -- ;; -- *error) -- opt_warning_types=$warning_categories -- warning_func=func_fatal_error -- ;; -- *) -- func_fatal_error \ -- "unsupported warning category: '$1'" -- ;; -- esac -- shift -- ;; -- -- --verbose|-v) opt_verbose=: ;; -- --version) func_version ;; -- -\?|-h) func_usage ;; -- --help) func_help ;; -- -- # Separate optargs to long options (plugins may need this): -- --*=*) func_split_equals "$_G_opt" -- set dummy "$func_split_equals_lhs" \ -- "$func_split_equals_rhs" ${1+"$@"} -- shift -- ;; -- -- # Separate optargs to short options: -- -W*) -- func_split_short_opt "$_G_opt" -- set dummy "$func_split_short_opt_name" \ -- "$func_split_short_opt_arg" ${1+"$@"} -- shift -- ;; -- -- # Separate non-argument short options: -- -\?*|-h*|-v*|-x*) -- func_split_short_opt "$_G_opt" -- set dummy "$func_split_short_opt_name" \ -- "-$func_split_short_opt_arg" ${1+"$@"} -- shift -- ;; -- -- --) break ;; -- -*) func_fatal_help "unrecognised option: '$_G_opt'" ;; -- *) set dummy "$_G_opt" ${1+"$@"}; shift; break ;; -- esac -- done -- -- # save modified positional parameters for caller -- func_quote_for_eval ${1+"$@"} -- func_parse_options_result=$func_quote_for_eval_result -+ case $1 in -+ [0-9]* | *[!a-zA-Z0-9_]*) -+ func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'` -+ ;; -+ * ) -+ func_tr_sh_result=$1 -+ ;; -+ esac - } - - --# func_validate_options [ARG]... --# ------------------------------ --# Perform any sanity checks on option settings and/or unconsumed --# arguments. --func_hookable func_validate_options --func_validate_options () -+# func_version -+# Echo version message to standard output and exit. -+func_version () - { -- $debug_cmd -- -- # Display all warnings if -W was not given. -- test -n "$opt_warning_types" || opt_warning_types=" $warning_categories" -- -- func_run_hooks func_validate_options ${1+"$@"} -+ $opt_debug - -- # Bail if the options were screwed! -- $exit_cmd $EXIT_FAILURE -- -- # save modified positional parameters for caller -- func_validate_options_result=$func_run_hooks_result -+ $SED -n '/(C)/!b go -+ :more -+ /\./!{ -+ N -+ s/\n# / / -+ b more -+ } -+ :go -+ /^# '$PROGRAM' (GNU /,/# warranty; / { -+ s/^# // -+ s/^# *$// -+ s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/ -+ p -+ }' < "$progpath" -+ exit $? - } - -- -- --## ----------------- ## --## Helper functions. ## --## ----------------- ## -- --# This section contains the helper functions used by the rest of the --# hookable option parser framework in ascii-betical order. -- -- --# func_fatal_help ARG... --# ---------------------- --# Echo program name prefixed message to standard error, followed by --# a help hint, and exit. 
--func_fatal_help () -+# func_usage -+# Echo short help message to standard output and exit. -+func_usage () - { -- $debug_cmd -+ $opt_debug - -- eval \$ECHO \""Usage: $usage"\" -- eval \$ECHO \""$fatal_help"\" -- func_error ${1+"$@"} -- exit $EXIT_FAILURE -+ $SED -n '/^# Usage:/,/^# *.*--help/ { -+ s/^# // -+ s/^# *$// -+ s/\$progname/'$progname'/ -+ p -+ }' < "$progpath" -+ echo -+ $ECHO "run \`$progname --help | more' for full usage" -+ exit $? - } - -- --# func_help --# --------- --# Echo long help message to standard output and exit. -+# func_help [NOEXIT] -+# Echo long help message to standard output and exit, -+# unless 'noexit' is passed as argument. - func_help () - { -- $debug_cmd -+ $opt_debug - -- func_usage_message -- $ECHO "$long_help_message" -- exit 0 -+ $SED -n '/^# Usage:/,/# Report bugs to/ { -+ :print -+ s/^# // -+ s/^# *$// -+ s*\$progname*'$progname'* -+ s*\$host*'"$host"'* -+ s*\$SHELL*'"$SHELL"'* -+ s*\$LTCC*'"$LTCC"'* -+ s*\$LTCFLAGS*'"$LTCFLAGS"'* -+ s*\$LD*'"$LD"'* -+ s/\$with_gnu_ld/'"$with_gnu_ld"'/ -+ s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/ -+ s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/ -+ p -+ d -+ } -+ /^# .* home page:/b print -+ /^# General help using/b print -+ ' < "$progpath" -+ ret=$? -+ if test -z "$1"; then -+ exit $ret -+ fi - } - -- --# func_missing_arg ARGNAME --# ------------------------ -+# func_missing_arg argname - # Echo program name prefixed message to standard error and set global - # exit_cmd. - func_missing_arg () - { -- $debug_cmd -+ $opt_debug - -- func_error "Missing argument for '$1'." -+ func_error "missing argument for $1." - exit_cmd=exit - } - - --# func_split_equals STRING --# ------------------------ --# Set func_split_equals_lhs and func_split_equals_rhs shell variables after --# splitting STRING at the '=' sign. --test -z "$_G_HAVE_XSI_OPS" \ -- && (eval 'x=a/b/c; -- test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \ -- && _G_HAVE_XSI_OPS=yes -- --if test yes = "$_G_HAVE_XSI_OPS" --then -- # This is an XSI compatible shell, allowing a faster implementation... -- eval 'func_split_equals () -- { -- $debug_cmd -- -- func_split_equals_lhs=${1%%=*} -- func_split_equals_rhs=${1#*=} -- test "x$func_split_equals_lhs" = "x$1" \ -- && func_split_equals_rhs= -- }' --else -- # ...otherwise fall back to using expr, which is often a shell builtin. -- func_split_equals () -- { -- $debug_cmd -- -- func_split_equals_lhs=`expr "x$1" : 'x\([^=]*\)'` -- func_split_equals_rhs= -- test "x$func_split_equals_lhs" = "x$1" \ -- || func_split_equals_rhs=`expr "x$1" : 'x[^=]*=\(.*\)$'` -- } --fi #func_split_equals -- -- --# func_split_short_opt SHORTOPT --# ----------------------------- -+# func_split_short_opt shortopt - # Set func_split_short_opt_name and func_split_short_opt_arg shell - # variables after splitting SHORTOPT after the 2nd character. --if test yes = "$_G_HAVE_XSI_OPS" --then -- # This is an XSI compatible shell, allowing a faster implementation... -- eval 'func_split_short_opt () -- { -- $debug_cmd -- -- func_split_short_opt_arg=${1#??} -- func_split_short_opt_name=${1%"$func_split_short_opt_arg"} -- }' --else -- # ...otherwise fall back to using expr, which is often a shell builtin. 
-- func_split_short_opt () -- { -- $debug_cmd -- -- func_split_short_opt_name=`expr "x$1" : 'x-\(.\)'` -- func_split_short_opt_arg=`expr "x$1" : 'x-.\(.*\)$'` -- } --fi #func_split_short_opt -- -- --# func_usage --# ---------- --# Echo short help message to standard output and exit. --func_usage () -+func_split_short_opt () - { -- $debug_cmd -+ my_sed_short_opt='1s/^\(..\).*$/\1/;q' -+ my_sed_short_rest='1s/^..\(.*\)$/\1/;q' - -- func_usage_message -- $ECHO "Run '$progname --help |${PAGER-more}' for full usage" -- exit 0 --} -+ func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"` -+ func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"` -+} # func_split_short_opt may be replaced by extended shell implementation - - --# func_usage_message --# ------------------ --# Echo short help message to standard output. --func_usage_message () -+# func_split_long_opt longopt -+# Set func_split_long_opt_name and func_split_long_opt_arg shell -+# variables after splitting LONGOPT at the `=' sign. -+func_split_long_opt () - { -- $debug_cmd -+ my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q' -+ my_sed_long_arg='1s/^--[^=]*=//' - -- eval \$ECHO \""Usage: $usage"\" -- echo -- $SED -n 's|^# || -- /^Written by/{ -- x;p;x -- } -- h -- /^Written by/q' < "$progpath" -- echo -- eval \$ECHO \""$usage_message"\" --} -+ func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"` -+ func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"` -+} # func_split_long_opt may be replaced by extended shell implementation - -+exit_cmd=: - --# func_version --# ------------ --# Echo version message to standard output and exit. --func_version () --{ -- $debug_cmd - -- printf '%s\n' "$progname $scriptversion" -- $SED -n ' -- /(C)/!b go -- :more -- /\./!{ -- N -- s|\n# | | -- b more -- } -- :go -- /^# Written by /,/# warranty; / { -- s|^# || -- s|^# *$|| -- s|\((C)\)[ 0-9,-]*[ ,-]\([1-9][0-9]* \)|\1 \2| -- p -- } -- /^# Written by / { -- s|^# || -- p -- } -- /^warranty; /q' < "$progpath" - -- exit $? --} - - --# Local variables: --# mode: shell-script --# sh-indentation: 2 --# eval: (add-hook 'before-save-hook 'time-stamp) --# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC" --# time-stamp-time-zone: "UTC" --# End: -+magic="%%%MAGIC variable%%%" -+magic_exe="%%%MAGIC EXE variable%%%" - --# Set a version string. --scriptversion='(GNU libtool) 2.4.6' -+# Global variables. -+nonopt= -+preserve_args= -+lo2o="s/\\.lo\$/.${objext}/" -+o2lo="s/\\.${objext}\$/.lo/" -+extracted_archives= -+extracted_serial=0 - -+# If this variable is set in any of the actions, the command in it -+# will be execed at the end. This prevents here-documents from being -+# left over by shells. -+exec_cmd= - --# func_echo ARG... --# ---------------- --# Libtool also displays the current mode in messages, so override --# funclib.sh func_echo with this custom definition. --func_echo () -+# func_append var value -+# Append VALUE to the end of shell variable VAR. -+func_append () - { -- $debug_cmd -- -- _G_message=$* -- -- func_echo_IFS=$IFS -- IFS=$nl -- for _G_line in $_G_message; do -- IFS=$func_echo_IFS -- $ECHO "$progname${opt_mode+: $opt_mode}: $_G_line" -- done -- IFS=$func_echo_IFS --} -+ eval "${1}=\$${1}\${2}" -+} # func_append may be replaced by extended shell implementation - -- --# func_warning ARG... --# ------------------- --# Libtool warnings are not categorized, so override funclib.sh --# func_warning with this simpler definition. 
--func_warning () -+# func_append_quoted var value -+# Quote VALUE and append to the end of shell variable VAR, separated -+# by a space. -+func_append_quoted () - { -- $debug_cmd -+ func_quote_for_eval "${2}" -+ eval "${1}=\$${1}\\ \$func_quote_for_eval_result" -+} # func_append_quoted may be replaced by extended shell implementation - -- $warning_func ${1+"$@"} --} -- -- --## ---------------- ## --## Options parsing. ## --## ---------------- ## -- --# Hook in the functions to make sure our own options are parsed during --# the option parsing loop. -- --usage='$progpath [OPTION]... [MODE-ARG]...' -- --# Short help message in response to '-h'. --usage_message="Options: -- --config show all configuration variables -- --debug enable verbose shell tracing -- -n, --dry-run display commands without modifying any files -- --features display basic configuration information and exit -- --mode=MODE use operation mode MODE -- --no-warnings equivalent to '-Wnone' -- --preserve-dup-deps don't remove duplicate dependency libraries -- --quiet, --silent don't print informational messages -- --tag=TAG use configuration variables from tag TAG -- -v, --verbose print more informational messages than default -- --version print version information -- -W, --warnings=CATEGORY report the warnings falling in CATEGORY [all] -- -h, --help, --help-all print short, long, or detailed help message --" - --# Additional text appended to 'usage_message' in response to '--help'. --func_help () -+# func_arith arithmetic-term... -+func_arith () - { -- $debug_cmd -- -- func_usage_message -- $ECHO "$long_help_message -- --MODE must be one of the following: -- -- clean remove files from the build directory -- compile compile a source file into a libtool object -- execute automatically set library path, then run a program -- finish complete the installation of libtool libraries -- install install libraries or executables -- link create a library or an executable -- uninstall remove libraries from an installed directory -- --MODE-ARGS vary depending on the MODE. When passed as first option, --'--mode=MODE' may be abbreviated as 'MODE' or a unique abbreviation of that. --Try '$progname --help --mode=MODE' for a more detailed description of MODE. -- --When reporting a bug, please describe a test case to reproduce it and --include the following information: -- -- host-triplet: $host -- shell: $SHELL -- compiler: $LTCC -- compiler flags: $LTCFLAGS -- linker: $LD (gnu? $with_gnu_ld) -- version: $progname (GNU libtool) 2.4.6 -- automake: `($AUTOMAKE --version) 2>/dev/null |$SED 1q` -- autoconf: `($AUTOCONF --version) 2>/dev/null |$SED 1q` -- --Report bugs to <bug-libtool@gnu.org>. --GNU libtool home page: <http://www.gnu.org/software/libtool/>. --General help using GNU software: <http://www.gnu.org/gethelp/>." -- exit 0 --} -+ func_arith_result=`expr "${@}"` -+} # func_arith may be replaced by extended shell implementation - - --# func_lo2o OBJECT-NAME --# --------------------- --# Transform OBJECT-NAME from a '.lo' suffix to the platform specific --# object suffix. -+# func_len string -+# STRING may not start with a hyphen. 
-+func_len () -+{ -+ func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len` -+} # func_len may be replaced by extended shell implementation - --lo2o=s/\\.lo\$/.$objext/ --o2lo=s/\\.$objext\$/.lo/ - --if test yes = "$_G_HAVE_XSI_OPS"; then -- eval 'func_lo2o () -- { -- case $1 in -- *.lo) func_lo2o_result=${1%.lo}.$objext ;; -- * ) func_lo2o_result=$1 ;; -- esac -- }' -+# func_lo2o object -+func_lo2o () -+{ -+ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` -+} # func_lo2o may be replaced by extended shell implementation - -- # func_xform LIBOBJ-OR-SOURCE -- # --------------------------- -- # Transform LIBOBJ-OR-SOURCE from a '.o' or '.c' (or otherwise) -- # suffix to a '.lo' libtool-object suffix. -- eval 'func_xform () -- { -- func_xform_result=${1%.*}.lo -- }' --else -- # ...otherwise fall back to using sed. -- func_lo2o () -- { -- func_lo2o_result=`$ECHO "$1" | $SED "$lo2o"` -- } - -- func_xform () -- { -- func_xform_result=`$ECHO "$1" | $SED 's|\.[^.]*$|.lo|'` -- } --fi -+# func_xform libobj-or-source -+func_xform () -+{ -+ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` -+} # func_xform may be replaced by extended shell implementation - - --# func_fatal_configuration ARG... --# ------------------------------- -+# func_fatal_configuration arg... - # Echo program name prefixed message to standard error, followed by - # a configuration failure hint, and exit. - func_fatal_configuration () - { -- func__fatal_error ${1+"$@"} \ -- "See the $PACKAGE documentation for more information." \ -- "Fatal configuration error." -+ func_error ${1+"$@"} -+ func_error "See the $PACKAGE documentation for more information." -+ func_fatal_error "Fatal configuration error." - } - - - # func_config --# ----------- - # Display the configuration for all the tags in this script. - func_config () - { -@@ -2149,19 +915,17 @@ - exit $? - } - -- - # func_features --# ------------- - # Display the features supported by this script. - func_features () - { - echo "host: $host" -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - echo "enable shared libraries" - else - echo "disable shared libraries" - fi -- if test yes = "$build_old_libs"; then -+ if test "$build_old_libs" = yes; then - echo "enable static libraries" - else - echo "disable static libraries" -@@ -2170,297 +934,289 @@ - exit $? - } - -- --# func_enable_tag TAGNAME --# ----------------------- -+# func_enable_tag tagname - # Verify that TAGNAME is valid, and either flag an error and exit, or - # enable the TAGNAME tag. We also add TAGNAME to the global $taglist - # variable here. - func_enable_tag () - { -- # Global variable: -- tagname=$1 -+ # Global variable: -+ tagname="$1" - -- re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" -- re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" -- sed_extractcf=/$re_begincf/,/$re_endcf/p -- -- # Validate tagname. -- case $tagname in -- *[!-_A-Za-z0-9,/]*) -- func_fatal_error "invalid tag name: $tagname" -- ;; -- esac -+ re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" -+ re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" -+ sed_extractcf="/$re_begincf/,/$re_endcf/p" -+ -+ # Validate tagname. -+ case $tagname in -+ *[!-_A-Za-z0-9,/]*) -+ func_fatal_error "invalid tag name: $tagname" -+ ;; -+ esac - -- # Don't test for the "default" C tag, as we know it's -- # there but not specially marked. -- case $tagname in -- CC) ;; -+ # Don't test for the "default" C tag, as we know it's -+ # there but not specially marked. 
-+ case $tagname in -+ CC) ;; - *) -- if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then -- taglist="$taglist $tagname" -+ if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then -+ taglist="$taglist $tagname" - -- # Evaluate the configuration. Be careful to quote the path -- # and the sed script, to avoid splitting on whitespace, but -- # also don't use non-portable quotes within backquotes within -- # quotes we have to do it in 2 steps: -- extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` -- eval "$extractedcf" -- else -- func_error "ignoring unknown tag $tagname" -- fi -- ;; -- esac -+ # Evaluate the configuration. Be careful to quote the path -+ # and the sed script, to avoid splitting on whitespace, but -+ # also don't use non-portable quotes within backquotes within -+ # quotes we have to do it in 2 steps: -+ extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` -+ eval "$extractedcf" -+ else -+ func_error "ignoring unknown tag $tagname" -+ fi -+ ;; -+ esac - } - -- - # func_check_version_match --# ------------------------ - # Ensure that we are using m4 macros, and libtool script from the same - # release of libtool. - func_check_version_match () - { -- if test "$package_revision" != "$macro_revision"; then -- if test "$VERSION" != "$macro_version"; then -- if test -z "$macro_version"; then -- cat >&2 <<_LT_EOF -+ if test "$package_revision" != "$macro_revision"; then -+ if test "$VERSION" != "$macro_version"; then -+ if test -z "$macro_version"; then -+ cat >&2 <<_LT_EOF - $progname: Version mismatch error. This is $PACKAGE $VERSION, but the - $progname: definition of this LT_INIT comes from an older release. - $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION - $progname: and run autoconf again. - _LT_EOF -- else -- cat >&2 <<_LT_EOF -+ else -+ cat >&2 <<_LT_EOF - $progname: Version mismatch error. This is $PACKAGE $VERSION, but the - $progname: definition of this LT_INIT comes from $PACKAGE $macro_version. - $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION - $progname: and run autoconf again. - _LT_EOF -- fi -- else -- cat >&2 <<_LT_EOF -+ fi -+ else -+ cat >&2 <<_LT_EOF - $progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, - $progname: but the definition of this LT_INIT comes from revision $macro_revision. - $progname: You should recreate aclocal.m4 with macros from revision $package_revision - $progname: of $PACKAGE $VERSION and run autoconf again. - _LT_EOF -- fi -- -- exit $EXIT_MISMATCH - fi -+ -+ exit $EXIT_MISMATCH -+ fi - } - - --# libtool_options_prep [ARG]... --# ----------------------------- --# Preparation for options parsed by libtool. 
--libtool_options_prep () --{ -- $debug_mode -- -- # Option defaults: -- opt_config=false -- opt_dlopen= -- opt_dry_run=false -- opt_help=false -- opt_mode= -- opt_preserve_dup_deps=false -- opt_quiet=false -+# Shorthand for --mode=foo, only valid as the first argument -+case $1 in -+clean|clea|cle|cl) -+ shift; set dummy --mode clean ${1+"$@"}; shift -+ ;; -+compile|compil|compi|comp|com|co|c) -+ shift; set dummy --mode compile ${1+"$@"}; shift -+ ;; -+execute|execut|execu|exec|exe|ex|e) -+ shift; set dummy --mode execute ${1+"$@"}; shift -+ ;; -+finish|finis|fini|fin|fi|f) -+ shift; set dummy --mode finish ${1+"$@"}; shift -+ ;; -+install|instal|insta|inst|ins|in|i) -+ shift; set dummy --mode install ${1+"$@"}; shift -+ ;; -+link|lin|li|l) -+ shift; set dummy --mode link ${1+"$@"}; shift -+ ;; -+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) -+ shift; set dummy --mode uninstall ${1+"$@"}; shift -+ ;; -+esac - -- nonopt= -- preserve_args= - -- # Shorthand for --mode=foo, only valid as the first argument -- case $1 in -- clean|clea|cle|cl) -- shift; set dummy --mode clean ${1+"$@"}; shift -- ;; -- compile|compil|compi|comp|com|co|c) -- shift; set dummy --mode compile ${1+"$@"}; shift -- ;; -- execute|execut|execu|exec|exe|ex|e) -- shift; set dummy --mode execute ${1+"$@"}; shift -- ;; -- finish|finis|fini|fin|fi|f) -- shift; set dummy --mode finish ${1+"$@"}; shift -- ;; -- install|instal|insta|inst|ins|in|i) -- shift; set dummy --mode install ${1+"$@"}; shift -- ;; -- link|lin|li|l) -- shift; set dummy --mode link ${1+"$@"}; shift -- ;; -- uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) -- shift; set dummy --mode uninstall ${1+"$@"}; shift -- ;; -- esac - -- # Pass back the list of options. -- func_quote_for_eval ${1+"$@"} -- libtool_options_prep_result=$func_quote_for_eval_result --} --func_add_hook func_options_prep libtool_options_prep -+# Option defaults: -+opt_debug=: -+opt_dry_run=false -+opt_config=false -+opt_preserve_dup_deps=false -+opt_features=false -+opt_finish=false -+opt_help=false -+opt_help_all=false -+opt_silent=: -+opt_warning=: -+opt_verbose=: -+opt_silent=false -+opt_verbose=false - - --# libtool_parse_options [ARG]... --# --------------------------------- --# Provide handling for libtool specific options. --libtool_parse_options () -+# Parse options once, thoroughly. This comes as soon as possible in the -+# script to make things like `--version' happen as quickly as we can. - { -- $debug_cmd -- -- # Perform our own loop to consume as many options as possible in -- # each iteration. 
-- while test $# -gt 0; do -- _G_opt=$1 -- shift -- case $_G_opt in -- --dry-run|--dryrun|-n) -- opt_dry_run=: -- ;; -- -- --config) func_config ;; -- -- --dlopen|-dlopen) -- opt_dlopen="${opt_dlopen+$opt_dlopen --}$1" -- shift -- ;; -- -- --preserve-dup-deps) -- opt_preserve_dup_deps=: ;; -- -- --features) func_features ;; -- -- --finish) set dummy --mode finish ${1+"$@"}; shift ;; -- -- --help) opt_help=: ;; -- -- --help-all) opt_help=': help-all' ;; -- -- --mode) test $# = 0 && func_missing_arg $_G_opt && break -- opt_mode=$1 -- case $1 in -- # Valid mode arguments: -- clean|compile|execute|finish|install|link|relink|uninstall) ;; -- -- # Catch anything else as an error -- *) func_error "invalid argument for $_G_opt" -- exit_cmd=exit -- break -- ;; -- esac -- shift -- ;; -- -- --no-silent|--no-quiet) -- opt_quiet=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --no-warnings|--no-warning|--no-warn) -- opt_warning=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --no-verbose) -- opt_verbose=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --silent|--quiet) -- opt_quiet=: -- opt_verbose=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --tag) test $# = 0 && func_missing_arg $_G_opt && break -- opt_tag=$1 -- func_append preserve_args " $_G_opt $1" -- func_enable_tag "$1" -- shift -- ;; -- -- --verbose|-v) opt_quiet=false -- opt_verbose=: -- func_append preserve_args " $_G_opt" -- ;; -+ # this just eases exit handling -+ while test $# -gt 0; do -+ opt="$1" -+ shift -+ case $opt in -+ --debug|-x) opt_debug='set -x' -+ func_echo "enabling shell trace mode" -+ $opt_debug -+ ;; -+ --dry-run|--dryrun|-n) -+ opt_dry_run=: -+ ;; -+ --config) -+ opt_config=: -+func_config -+ ;; -+ --dlopen|-dlopen) -+ optarg="$1" -+ opt_dlopen="${opt_dlopen+$opt_dlopen -+}$optarg" -+ shift -+ ;; -+ --preserve-dup-deps) -+ opt_preserve_dup_deps=: -+ ;; -+ --features) -+ opt_features=: -+func_features -+ ;; -+ --finish) -+ opt_finish=: -+set dummy --mode finish ${1+"$@"}; shift -+ ;; -+ --help) -+ opt_help=: -+ ;; -+ --help-all) -+ opt_help_all=: -+opt_help=': help-all' -+ ;; -+ --mode) -+ test $# = 0 && func_missing_arg $opt && break -+ optarg="$1" -+ opt_mode="$optarg" -+case $optarg in -+ # Valid mode arguments: -+ clean|compile|execute|finish|install|link|relink|uninstall) ;; -+ -+ # Catch anything else as an error -+ *) func_error "invalid argument for $opt" -+ exit_cmd=exit -+ break -+ ;; -+esac -+ shift -+ ;; -+ --no-silent|--no-quiet) -+ opt_silent=false -+func_append preserve_args " $opt" -+ ;; -+ --no-warning|--no-warn) -+ opt_warning=false -+func_append preserve_args " $opt" -+ ;; -+ --no-verbose) -+ opt_verbose=false -+func_append preserve_args " $opt" -+ ;; -+ --silent|--quiet) -+ opt_silent=: -+func_append preserve_args " $opt" -+ opt_verbose=false -+ ;; -+ --verbose|-v) -+ opt_verbose=: -+func_append preserve_args " $opt" -+opt_silent=false -+ ;; -+ --tag) -+ test $# = 0 && func_missing_arg $opt && break -+ optarg="$1" -+ opt_tag="$optarg" -+func_append preserve_args " $opt $optarg" -+func_enable_tag "$optarg" -+ shift -+ ;; -+ -+ -\?|-h) func_usage ;; -+ --help) func_help ;; -+ --version) func_version ;; -+ -+ # Separate optargs to long options: -+ --*=*) -+ func_split_long_opt "$opt" -+ set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"} -+ shift -+ ;; -+ -+ # Separate non-argument short options: -+ -\?*|-h*|-n*|-v*) -+ func_split_short_opt "$opt" -+ set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"} -+ shift -+ ;; 
-+ -+ --) break ;; -+ -*) func_fatal_help "unrecognized option \`$opt'" ;; -+ *) set dummy "$opt" ${1+"$@"}; shift; break ;; -+ esac -+ done - -- # An option not handled by this hook function: -- *) set dummy "$_G_opt" ${1+"$@"}; shift; break ;; -- esac -- done -+ # Validate options: - -+ # save first non-option argument -+ if test "$#" -gt 0; then -+ nonopt="$opt" -+ shift -+ fi - -- # save modified positional parameters for caller -- func_quote_for_eval ${1+"$@"} -- libtool_parse_options_result=$func_quote_for_eval_result --} --func_add_hook func_parse_options libtool_parse_options -+ # preserve --debug -+ test "$opt_debug" = : || func_append preserve_args " --debug" - -+ case $host in -+ *cygwin* | *mingw* | *pw32* | *cegcc*) -+ # don't eliminate duplications in $postdeps and $predeps -+ opt_duplicate_compiler_generated_deps=: -+ ;; -+ *) -+ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps -+ ;; -+ esac - -+ $opt_help || { -+ # Sanity checks first: -+ func_check_version_match - --# libtool_validate_options [ARG]... --# --------------------------------- --# Perform any sanity checks on option settings and/or unconsumed --# arguments. --libtool_validate_options () --{ -- # save first non-option argument -- if test 0 -lt $#; then -- nonopt=$1 -- shift -+ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then -+ func_fatal_configuration "not configured to build any kind of library" - fi - -- # preserve --debug -- test : = "$debug_cmd" || func_append preserve_args " --debug" -+ # Darwin sucks -+ eval std_shrext=\"$shrext_cmds\" - -- case $host in -- # Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452 -- # see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788 -- *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* | *os2*) -- # don't eliminate duplications in $postdeps and $predeps -- opt_duplicate_compiler_generated_deps=: -- ;; -- *) -- opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps -- ;; -- esac -+ # Only execute mode is allowed to have -dlopen flags. -+ if test -n "$opt_dlopen" && test "$opt_mode" != execute; then -+ func_error "unrecognized option \`-dlopen'" -+ $ECHO "$help" 1>&2 -+ exit $EXIT_FAILURE -+ fi - -- $opt_help || { -- # Sanity checks first: -- func_check_version_match -- -- test yes != "$build_libtool_libs" \ -- && test yes != "$build_old_libs" \ -- && func_fatal_configuration "not configured to build any kind of library" -- -- # Darwin sucks -- eval std_shrext=\"$shrext_cmds\" -- -- # Only execute mode is allowed to have -dlopen flags. -- if test -n "$opt_dlopen" && test execute != "$opt_mode"; then -- func_error "unrecognized option '-dlopen'" -- $ECHO "$help" 1>&2 -- exit $EXIT_FAILURE -- fi -- -- # Change the help message to a mode-specific one. -- generic_help=$help -- help="Try '$progname --help --mode=$opt_mode' for more information." -- } -+ # Change the help message to a mode-specific one. -+ generic_help="$help" -+ help="Try \`$progname --help --mode=$opt_mode' for more information." -+ } - -- # Pass back the unparsed argument list -- func_quote_for_eval ${1+"$@"} -- libtool_validate_options_result=$func_quote_for_eval_result --} --func_add_hook func_validate_options libtool_validate_options - -+ # Bail if the options were screwed -+ $exit_cmd $EXIT_FAILURE -+} - --# Process options as early as possible so that --help and --version --# can return quickly. --func_options ${1+"$@"} --eval set dummy "$func_options_result"; shift - - - -@@ -2468,52 +1224,24 @@ - ## Main. 
## - ## ----------- ## - --magic='%%%MAGIC variable%%%' --magic_exe='%%%MAGIC EXE variable%%%' -- --# Global variables. --extracted_archives= --extracted_serial=0 -- --# If this variable is set in any of the actions, the command in it --# will be execed at the end. This prevents here-documents from being --# left over by shells. --exec_cmd= -- -- --# A function that is used when there is no print builtin or printf. --func_fallback_echo () --{ -- eval 'cat <<_LTECHO_EOF --$1 --_LTECHO_EOF' --} -- --# func_generated_by_libtool --# True iff stdin has been generated by Libtool. This function is only --# a basic sanity check; it will hardly flush out determined imposters. --func_generated_by_libtool_p () --{ -- $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 --} -- - # func_lalib_p file --# True iff FILE is a libtool '.la' library or '.lo' object file. -+# True iff FILE is a libtool `.la' library or `.lo' object file. - # This function is only a basic sanity check; it will hardly flush out - # determined imposters. - func_lalib_p () - { - test -f "$1" && -- $SED -e 4q "$1" 2>/dev/null | func_generated_by_libtool_p -+ $SED -e 4q "$1" 2>/dev/null \ -+ | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 - } - - # func_lalib_unsafe_p file --# True iff FILE is a libtool '.la' library or '.lo' object file. -+# True iff FILE is a libtool `.la' library or `.lo' object file. - # This function implements the same check as func_lalib_p without - # resorting to external programs. To this end, it redirects stdin and - # closes it afterwards, without saving the original file descriptor. - # As a safety measure, use it only where a negative result would be --# fatal anyway. Works if 'file' does not exist. -+# fatal anyway. Works if `file' does not exist. - func_lalib_unsafe_p () - { - lalib_p=no -@@ -2521,13 +1249,13 @@ - for lalib_p_l in 1 2 3 4 - do - read lalib_p_line -- case $lalib_p_line in -+ case "$lalib_p_line" in - \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;; - esac - done - exec 0<&5 5<&- - fi -- test yes = "$lalib_p" -+ test "$lalib_p" = yes - } - - # func_ltwrapper_script_p file -@@ -2536,8 +1264,7 @@ - # determined imposters. - func_ltwrapper_script_p () - { -- test -f "$1" && -- $lt_truncate_bin < "$1" 2>/dev/null | func_generated_by_libtool_p -+ func_lalib_p "$1" - } - - # func_ltwrapper_executable_p file -@@ -2562,7 +1289,7 @@ - { - func_dirname_and_basename "$1" "" "." - func_stripname '' '.exe' "$func_basename_result" -- func_ltwrapper_scriptname_result=$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper -+ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" - } - - # func_ltwrapper_p file -@@ -2581,13 +1308,11 @@ - # FAIL_CMD may read-access the current command in variable CMD! - func_execute_cmds () - { -- $debug_cmd -- -+ $opt_debug - save_ifs=$IFS; IFS='~' - for cmd in $1; do -- IFS=$sp$nl -- eval cmd=\"$cmd\" - IFS=$save_ifs -+ eval cmd=\"$cmd\" - func_show_eval "$cmd" "${2-:}" - done - IFS=$save_ifs -@@ -2599,11 +1324,10 @@ - # Note that it is not necessary on cygwin/mingw to append a dot to - # FILE even if both FILE and FILE.exe exist: automatic-append-.exe - # behavior happens only for exec(3), not for open(2)! Also, sourcing --# 'FILE.' does not work on cygwin managed mounts. -+# `FILE.' does not work on cygwin managed mounts. - func_source () - { -- $debug_cmd -- -+ $opt_debug - case $1 in - */* | *\\*) . "$1" ;; - *) . 
"./$1" ;; -@@ -2630,10 +1354,10 @@ - # store the result into func_replace_sysroot_result. - func_replace_sysroot () - { -- case $lt_sysroot:$1 in -+ case "$lt_sysroot:$1" in - ?*:"$lt_sysroot"*) - func_stripname "$lt_sysroot" '' "$1" -- func_replace_sysroot_result='='$func_stripname_result -+ func_replace_sysroot_result="=$func_stripname_result" - ;; - *) - # Including no sysroot. -@@ -2650,8 +1374,7 @@ - # arg is usually of the form 'gcc ...' - func_infer_tag () - { -- $debug_cmd -- -+ $opt_debug - if test -n "$available_tags" && test -z "$tagname"; then - CC_quoted= - for arg in $CC; do -@@ -2670,7 +1393,7 @@ - for z in $available_tags; do - if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then - # Evaluate the configuration. -- eval "`$SED -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" -+ eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" - CC_quoted= - for arg in $CC; do - # Double-quote args containing other shell metacharacters. -@@ -2695,7 +1418,7 @@ - # line option must be used. - if test -z "$tagname"; then - func_echo "unable to infer tagged configuration" -- func_fatal_error "specify a tag with '--tag'" -+ func_fatal_error "specify a tag with \`--tag'" - # else - # func_verbose "using $tagname tagged configuration" - fi -@@ -2711,15 +1434,15 @@ - # but don't create it if we're doing a dry run. - func_write_libtool_object () - { -- write_libobj=$1 -- if test yes = "$build_libtool_libs"; then -- write_lobj=\'$2\' -+ write_libobj=${1} -+ if test "$build_libtool_libs" = yes; then -+ write_lobj=\'${2}\' - else - write_lobj=none - fi - -- if test yes = "$build_old_libs"; then -- write_oldobj=\'$3\' -+ if test "$build_old_libs" = yes; then -+ write_oldobj=\'${3}\' - else - write_oldobj=none - fi -@@ -2727,7 +1450,7 @@ - $opt_dry_run || { - cat >${write_libobj}T <<EOF - # $write_libobj - a libtool object file --# Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - # - # Please DO NOT delete this file! - # It is necessary for linking the library. -@@ -2739,7 +1462,7 @@ - non_pic_object=$write_oldobj - - EOF -- $MV "${write_libobj}T" "$write_libobj" -+ $MV "${write_libobj}T" "${write_libobj}" - } - } - -@@ -2759,9 +1482,8 @@ - # be empty on error (or when ARG is empty) - func_convert_core_file_wine_to_w32 () - { -- $debug_cmd -- -- func_convert_core_file_wine_to_w32_result=$1 -+ $opt_debug -+ func_convert_core_file_wine_to_w32_result="$1" - if test -n "$1"; then - # Unfortunately, winepath does not exit with a non-zero error code, so we - # are forced to check the contents of stdout. On the other hand, if the -@@ -2769,9 +1491,9 @@ - # *an error message* to stdout. So we must check for both error code of - # zero AND non-empty stdout, which explains the odd construction: - func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null` -- if test "$?" -eq 0 && test -n "$func_convert_core_file_wine_to_w32_tmp"; then -+ if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then - func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" | -- $SED -e "$sed_naive_backslashify"` -+ $SED -e "$lt_sed_naive_backslashify"` - else - func_convert_core_file_wine_to_w32_result= - fi -@@ -2792,19 +1514,18 @@ - # are convertible, then the result may be empty. 
- func_convert_core_path_wine_to_w32 () - { -- $debug_cmd -- -+ $opt_debug - # unfortunately, winepath doesn't convert paths, only file names -- func_convert_core_path_wine_to_w32_result= -+ func_convert_core_path_wine_to_w32_result="" - if test -n "$1"; then - oldIFS=$IFS - IFS=: - for func_convert_core_path_wine_to_w32_f in $1; do - IFS=$oldIFS - func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f" -- if test -n "$func_convert_core_file_wine_to_w32_result"; then -+ if test -n "$func_convert_core_file_wine_to_w32_result" ; then - if test -z "$func_convert_core_path_wine_to_w32_result"; then -- func_convert_core_path_wine_to_w32_result=$func_convert_core_file_wine_to_w32_result -+ func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result" - else - func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result" - fi -@@ -2833,8 +1554,7 @@ - # environment variable; do not put it in $PATH. - func_cygpath () - { -- $debug_cmd -- -+ $opt_debug - if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then - func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` - if test "$?" -ne 0; then -@@ -2843,7 +1563,7 @@ - fi - else - func_cygpath_result= -- func_error "LT_CYGPATH is empty or specifies non-existent file: '$LT_CYGPATH'" -+ func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'" - fi - } - #end: func_cygpath -@@ -2854,11 +1574,10 @@ - # result in func_convert_core_msys_to_w32_result. - func_convert_core_msys_to_w32 () - { -- $debug_cmd -- -+ $opt_debug - # awkward: cmd appends spaces to result - func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null | -- $SED -e 's/[ ]*$//' -e "$sed_naive_backslashify"` -+ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` - } - #end: func_convert_core_msys_to_w32 - -@@ -2869,14 +1588,13 @@ - # func_to_host_file_result to ARG1). - func_convert_file_check () - { -- $debug_cmd -- -- if test -z "$2" && test -n "$1"; then -+ $opt_debug -+ if test -z "$2" && test -n "$1" ; then - func_error "Could not determine host file name corresponding to" -- func_error " '$1'" -+ func_error " \`$1'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback: -- func_to_host_file_result=$1 -+ func_to_host_file_result="$1" - fi - } - # end func_convert_file_check -@@ -2888,11 +1606,10 @@ - # func_to_host_file_result to a simplistic fallback value (see below). - func_convert_path_check () - { -- $debug_cmd -- -+ $opt_debug - if test -z "$4" && test -n "$3"; then - func_error "Could not determine the host path corresponding to" -- func_error " '$3'" -+ func_error " \`$3'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback. This is a deliberately simplistic "conversion" and - # should not be "improved". See libtool.info. -@@ -2901,7 +1618,7 @@ - func_to_host_path_result=`echo "$3" | - $SED -e "$lt_replace_pathsep_chars"` - else -- func_to_host_path_result=$3 -+ func_to_host_path_result="$3" - fi - fi - } -@@ -2913,10 +1630,9 @@ - # and appending REPL if ORIG matches BACKPAT. 
- func_convert_path_front_back_pathsep () - { -- $debug_cmd -- -+ $opt_debug - case $4 in -- $1 ) func_to_host_path_result=$3$func_to_host_path_result -+ $1 ) func_to_host_path_result="$3$func_to_host_path_result" - ;; - esac - case $4 in -@@ -2930,7 +1646,7 @@ - ################################################## - # $build to $host FILE NAME CONVERSION FUNCTIONS # - ################################################## --# invoked via '$to_host_file_cmd ARG' -+# invoked via `$to_host_file_cmd ARG' - # - # In each case, ARG is the path to be converted from $build to $host format. - # Result will be available in $func_to_host_file_result. -@@ -2941,8 +1657,7 @@ - # in func_to_host_file_result. - func_to_host_file () - { -- $debug_cmd -- -+ $opt_debug - $to_host_file_cmd "$1" - } - # end func_to_host_file -@@ -2954,8 +1669,7 @@ - # in (the comma separated) LAZY, no conversion takes place. - func_to_tool_file () - { -- $debug_cmd -- -+ $opt_debug - case ,$2, in - *,"$to_tool_file_cmd",*) - func_to_tool_file_result=$1 -@@ -2973,7 +1687,7 @@ - # Copy ARG to func_to_host_file_result. - func_convert_file_noop () - { -- func_to_host_file_result=$1 -+ func_to_host_file_result="$1" - } - # end func_convert_file_noop - -@@ -2984,12 +1698,11 @@ - # func_to_host_file_result. - func_convert_file_msys_to_w32 () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - func_convert_core_msys_to_w32 "$1" -- func_to_host_file_result=$func_convert_core_msys_to_w32_result -+ func_to_host_file_result="$func_convert_core_msys_to_w32_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3001,9 +1714,8 @@ - # func_to_host_file_result. - func_convert_file_cygwin_to_w32 () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - # because $build is cygwin, we call "the" cygpath in $PATH; no need to use - # LT_CYGPATH in this case. -@@ -3019,12 +1731,11 @@ - # and a working winepath. Returns result in func_to_host_file_result. - func_convert_file_nix_to_w32 () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - func_convert_core_file_wine_to_w32 "$1" -- func_to_host_file_result=$func_convert_core_file_wine_to_w32_result -+ func_to_host_file_result="$func_convert_core_file_wine_to_w32_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3036,13 +1747,12 @@ - # Returns result in func_to_host_file_result. - func_convert_file_msys_to_cygwin () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - func_convert_core_msys_to_w32 "$1" - func_cygpath -u "$func_convert_core_msys_to_w32_result" -- func_to_host_file_result=$func_cygpath_result -+ func_to_host_file_result="$func_cygpath_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3055,14 +1765,13 @@ - # in func_to_host_file_result. - func_convert_file_nix_to_cygwin () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - # convert from *nix to w32, then use cygpath to convert from w32 to cygwin. 
- func_convert_core_file_wine_to_w32 "$1" - func_cygpath -u "$func_convert_core_file_wine_to_w32_result" -- func_to_host_file_result=$func_cygpath_result -+ func_to_host_file_result="$func_cygpath_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3072,7 +1781,7 @@ - ############################################# - # $build to $host PATH CONVERSION FUNCTIONS # - ############################################# --# invoked via '$to_host_path_cmd ARG' -+# invoked via `$to_host_path_cmd ARG' - # - # In each case, ARG is the path to be converted from $build to $host format. - # The result will be available in $func_to_host_path_result. -@@ -3096,11 +1805,10 @@ - to_host_path_cmd= - func_init_to_host_path_cmd () - { -- $debug_cmd -- -+ $opt_debug - if test -z "$to_host_path_cmd"; then - func_stripname 'func_convert_file_' '' "$to_host_file_cmd" -- to_host_path_cmd=func_convert_path_$func_stripname_result -+ to_host_path_cmd="func_convert_path_${func_stripname_result}" - fi - } - -@@ -3110,8 +1818,7 @@ - # in func_to_host_path_result. - func_to_host_path () - { -- $debug_cmd -- -+ $opt_debug - func_init_to_host_path_cmd - $to_host_path_cmd "$1" - } -@@ -3122,7 +1829,7 @@ - # Copy ARG to func_to_host_path_result. - func_convert_path_noop () - { -- func_to_host_path_result=$1 -+ func_to_host_path_result="$1" - } - # end func_convert_path_noop - -@@ -3133,9 +1840,8 @@ - # func_to_host_path_result. - func_convert_path_msys_to_w32 () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # Remove leading and trailing path separator characters from ARG. MSYS - # behavior is inconsistent here; cygpath turns them into '.;' and ';.'; -@@ -3143,7 +1849,7 @@ - func_stripname : : "$1" - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" -- func_to_host_path_result=$func_convert_core_msys_to_w32_result -+ func_to_host_path_result="$func_convert_core_msys_to_w32_result" - func_convert_path_check : ";" \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" -@@ -3157,9 +1863,8 @@ - # func_to_host_file_result. - func_convert_path_cygwin_to_w32 () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # See func_convert_path_msys_to_w32: - func_stripname : : "$1" -@@ -3178,15 +1883,14 @@ - # a working winepath. Returns result in func_to_host_file_result. - func_convert_path_nix_to_w32 () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # See func_convert_path_msys_to_w32: - func_stripname : : "$1" - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" -- func_to_host_path_result=$func_convert_core_path_wine_to_w32_result -+ func_to_host_path_result="$func_convert_core_path_wine_to_w32_result" - func_convert_path_check : ";" \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" -@@ -3200,16 +1904,15 @@ - # Returns result in func_to_host_file_result. 
- func_convert_path_msys_to_cygwin () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # See func_convert_path_msys_to_w32: - func_stripname : : "$1" - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" - func_cygpath -u -p "$func_convert_core_msys_to_w32_result" -- func_to_host_path_result=$func_cygpath_result -+ func_to_host_path_result="$func_cygpath_result" - func_convert_path_check : : \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" : "$1" -@@ -3224,9 +1927,8 @@ - # func_to_host_file_result. - func_convert_path_nix_to_cygwin () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # Remove leading and trailing path separator characters from - # ARG. msys behavior is inconsistent here, cygpath turns them -@@ -3235,7 +1937,7 @@ - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" - func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result" -- func_to_host_path_result=$func_cygpath_result -+ func_to_host_path_result="$func_cygpath_result" - func_convert_path_check : : \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" : "$1" -@@ -3244,31 +1946,13 @@ - # end func_convert_path_nix_to_cygwin - - --# func_dll_def_p FILE --# True iff FILE is a Windows DLL '.def' file. --# Keep in sync with _LT_DLL_DEF_P in libtool.m4 --func_dll_def_p () --{ -- $debug_cmd -- -- func_dll_def_p_tmp=`$SED -n \ -- -e 's/^[ ]*//' \ -- -e '/^\(;.*\)*$/d' \ -- -e 's/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p' \ -- -e q \ -- "$1"` -- test DEF = "$func_dll_def_p_tmp" --} -- -- - # func_mode_compile arg... - func_mode_compile () - { -- $debug_cmd -- -+ $opt_debug - # Get the compilation command and the source file. - base_compile= -- srcfile=$nonopt # always keep a non-empty value in "srcfile" -+ srcfile="$nonopt" # always keep a non-empty value in "srcfile" - suppress_opt=yes - suppress_output= - arg_mode=normal -@@ -3281,12 +1965,12 @@ - case $arg_mode in - arg ) - # do not "continue". Instead, add this to base_compile -- lastarg=$arg -+ lastarg="$arg" - arg_mode=normal - ;; - - target ) -- libobj=$arg -+ libobj="$arg" - arg_mode=normal - continue - ;; -@@ -3296,7 +1980,7 @@ - case $arg in - -o) - test -n "$libobj" && \ -- func_fatal_error "you cannot specify '-o' more than once" -+ func_fatal_error "you cannot specify \`-o' more than once" - arg_mode=target - continue - ;; -@@ -3325,12 +2009,12 @@ - func_stripname '-Wc,' '' "$arg" - args=$func_stripname_result - lastarg= -- save_ifs=$IFS; IFS=, -+ save_ifs="$IFS"; IFS=',' - for arg in $args; do -- IFS=$save_ifs -+ IFS="$save_ifs" - func_append_quoted lastarg "$arg" - done -- IFS=$save_ifs -+ IFS="$save_ifs" - func_stripname ' ' '' "$lastarg" - lastarg=$func_stripname_result - -@@ -3343,8 +2027,8 @@ - # Accept the current argument as the source file. - # The previous "srcfile" becomes the current argument. - # -- lastarg=$srcfile -- srcfile=$arg -+ lastarg="$srcfile" -+ srcfile="$arg" - ;; - esac # case $arg - ;; -@@ -3359,13 +2043,13 @@ - func_fatal_error "you must specify an argument for -Xcompile" - ;; - target) -- func_fatal_error "you must specify a target with '-o'" -+ func_fatal_error "you must specify a target with \`-o'" - ;; - *) - # Get the name of the library object. 
- test -z "$libobj" && { - func_basename "$srcfile" -- libobj=$func_basename_result -+ libobj="$func_basename_result" - } - ;; - esac -@@ -3385,7 +2069,7 @@ - case $libobj in - *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;; - *) -- func_fatal_error "cannot determine name of library object from '$libobj'" -+ func_fatal_error "cannot determine name of library object from \`$libobj'" - ;; - esac - -@@ -3394,8 +2078,8 @@ - for arg in $later; do - case $arg in - -shared) -- test yes = "$build_libtool_libs" \ -- || func_fatal_configuration "cannot build a shared library" -+ test "$build_libtool_libs" != yes && \ -+ func_fatal_configuration "can not build a shared library" - build_old_libs=no - continue - ;; -@@ -3421,17 +2105,17 @@ - func_quote_for_eval "$libobj" - test "X$libobj" != "X$func_quote_for_eval_result" \ - && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \ -- && func_warning "libobj name '$libobj' may not contain shell special characters." -+ && func_warning "libobj name \`$libobj' may not contain shell special characters." - func_dirname_and_basename "$obj" "/" "" -- objname=$func_basename_result -- xdir=$func_dirname_result -- lobj=$xdir$objdir/$objname -+ objname="$func_basename_result" -+ xdir="$func_dirname_result" -+ lobj=${xdir}$objdir/$objname - - test -z "$base_compile" && \ - func_fatal_help "you must specify a compilation command" - - # Delete any leftover library objects. -- if test yes = "$build_old_libs"; then -+ if test "$build_old_libs" = yes; then - removelist="$obj $lobj $libobj ${libobj}T" - else - removelist="$lobj $libobj ${libobj}T" -@@ -3443,16 +2127,16 @@ - pic_mode=default - ;; - esac -- if test no = "$pic_mode" && test pass_all != "$deplibs_check_method"; then -+ if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then - # non-PIC code in shared libraries is not supported - pic_mode=default - fi - - # Calculate the filename of the output object if compiler does - # not support -o with -c -- if test no = "$compiler_c_o"; then -- output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.$objext -- lockfile=$output_obj.lock -+ if test "$compiler_c_o" = no; then -+ output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext} -+ lockfile="$output_obj.lock" - else - output_obj= - need_locks=no -@@ -3461,12 +2145,12 @@ - - # Lock this critical section if it is needed - # We use this script file to make the link, it avoids creating a new file -- if test yes = "$need_locks"; then -+ if test "$need_locks" = yes; then - until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do - func_echo "Waiting for $lockfile to be removed" - sleep 2 - done -- elif test warn = "$need_locks"; then -+ elif test "$need_locks" = warn; then - if test -f "$lockfile"; then - $ECHO "\ - *** ERROR, $lockfile exists and contains: -@@ -3474,7 +2158,7 @@ - - This indicates that another process is trying to use the same - temporary object file, and libtool could not work around it because --your compiler does not support '-c' and '-o' together. If you -+your compiler does not support \`-c' and \`-o' together. If you - repeat this compilation, it may succeed, by chance, but you had better - avoid parallel builds (make -j) in this platform, or get a better - compiler." -@@ -3496,11 +2180,11 @@ - qsrcfile=$func_quote_for_eval_result - - # Only build a PIC object if we are building libtool libraries. -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - # Without this assignment, base_compile gets emptied. 
- fbsd_hideous_sh_bug=$base_compile - -- if test no != "$pic_mode"; then -+ if test "$pic_mode" != no; then - command="$base_compile $qsrcfile $pic_flag" - else - # Don't build PIC code -@@ -3517,7 +2201,7 @@ - func_show_eval_locale "$command" \ - 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE' - -- if test warn = "$need_locks" && -+ if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $ECHO "\ - *** ERROR, $lockfile contains: -@@ -3528,7 +2212,7 @@ - - This indicates that another process is trying to use the same - temporary object file, and libtool could not work around it because --your compiler does not support '-c' and '-o' together. If you -+your compiler does not support \`-c' and \`-o' together. If you - repeat this compilation, it may succeed, by chance, but you had better - avoid parallel builds (make -j) in this platform, or get a better - compiler." -@@ -3544,20 +2228,20 @@ - fi - - # Allow error messages only from the first compilation. -- if test yes = "$suppress_opt"; then -+ if test "$suppress_opt" = yes; then - suppress_output=' >/dev/null 2>&1' - fi - fi - - # Only build a position-dependent object if we build old libraries. -- if test yes = "$build_old_libs"; then -- if test yes != "$pic_mode"; then -+ if test "$build_old_libs" = yes; then -+ if test "$pic_mode" != yes; then - # Don't build PIC code - command="$base_compile $qsrcfile$pie_flag" - else - command="$base_compile $qsrcfile $pic_flag" - fi -- if test yes = "$compiler_c_o"; then -+ if test "$compiler_c_o" = yes; then - func_append command " -o $obj" - fi - -@@ -3566,7 +2250,7 @@ - func_show_eval_locale "$command" \ - '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' - -- if test warn = "$need_locks" && -+ if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $ECHO "\ - *** ERROR, $lockfile contains: -@@ -3577,7 +2261,7 @@ - - This indicates that another process is trying to use the same - temporary object file, and libtool could not work around it because --your compiler does not support '-c' and '-o' together. If you -+your compiler does not support \`-c' and \`-o' together. If you - repeat this compilation, it may succeed, by chance, but you had better - avoid parallel builds (make -j) in this platform, or get a better - compiler." -@@ -3597,7 +2281,7 @@ - func_write_libtool_object "$libobj" "$objdir/$objname" "$objname" - - # Unlock the critical section if it was locked -- if test no != "$need_locks"; then -+ if test "$need_locks" != no; then - removelist=$lockfile - $RM "$lockfile" - fi -@@ -3607,7 +2291,7 @@ - } - - $opt_help || { -- test compile = "$opt_mode" && func_mode_compile ${1+"$@"} -+ test "$opt_mode" = compile && func_mode_compile ${1+"$@"} - } - - func_mode_help () -@@ -3627,7 +2311,7 @@ - Remove files from the build directory. - - RM is the name of the program to use to delete files associated with each FILE --(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed -+(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed - to RM. 
- - If FILE is a libtool library, object or program, all the files associated -@@ -3646,16 +2330,16 @@ - -no-suppress do not suppress compiler output for multiple passes - -prefer-pic try to build PIC objects only - -prefer-non-pic try to build non-PIC objects only -- -shared do not build a '.o' file suitable for static linking -- -static only build a '.o' file suitable for static linking -+ -shared do not build a \`.o' file suitable for static linking -+ -static only build a \`.o' file suitable for static linking - -Wc,FLAG pass FLAG directly to the compiler - --COMPILE-COMMAND is a command to be used in creating a 'standard' object file -+COMPILE-COMMAND is a command to be used in creating a \`standard' object file - from the given SOURCEFILE. - - The output file name is determined by removing the directory component from --SOURCEFILE, then substituting the C source code suffix '.c' with the --library object suffix, '.lo'." -+SOURCEFILE, then substituting the C source code suffix \`.c' with the -+library object suffix, \`.lo'." - ;; - - execute) -@@ -3668,7 +2352,7 @@ - - -dlopen FILE add the directory containing FILE to the library path - --This mode sets the library path environment variable according to '-dlopen' -+This mode sets the library path environment variable according to \`-dlopen' - flags. - - If any of the ARGS are libtool executable wrappers, then they are translated -@@ -3687,7 +2371,7 @@ - Each LIBDIR is a directory that contains libtool libraries. - - The commands that this mode executes may require superuser privileges. Use --the '--dry-run' option if you just want to see what would be executed." -+the \`--dry-run' option if you just want to see what would be executed." - ;; - - install) -@@ -3697,7 +2381,7 @@ - Install executables or libraries. - - INSTALL-COMMAND is the installation command. The first component should be --either the 'install' or 'cp' program. -+either the \`install' or \`cp' program. - - The following components of INSTALL-COMMAND are treated specially: - -@@ -3723,7 +2407,7 @@ - -avoid-version do not add a version suffix if possible - -bindir BINDIR specify path to binaries directory (for systems where - libraries must be found in the PATH setting at runtime) -- -dlopen FILE '-dlpreopen' FILE if it cannot be dlopened at runtime -+ -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime - -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols - -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) - -export-symbols SYMFILE -@@ -3737,8 +2421,7 @@ - -no-install link a not-installable executable - -no-undefined declare that a library does not refer to external symbols - -o OUTPUT-FILE create OUTPUT-FILE from the specified objects -- -objectlist FILE use a list of object files found in FILE to specify objects -- -os2dllname NAME force a short DLL name on OS/2 (no effect on other OSes) -+ -objectlist FILE Use a list of object files found in FILE to specify objects - -precious-files-regex REGEX - don't remove output files matching REGEX - -release RELEASE specify package release information -@@ -3758,20 +2441,20 @@ - -Xlinker FLAG pass linker-specific FLAG directly to the linker - -XCClinker FLAG pass link-specific FLAG to the compiler driver (CC) - --All other options (arguments beginning with '-') are ignored. -+All other options (arguments beginning with \`-') are ignored. - --Every other argument is treated as a filename. Files ending in '.la' are -+Every other argument is treated as a filename. 
Files ending in \`.la' are - treated as uninstalled libtool libraries, other files are standard or library - object files. - --If the OUTPUT-FILE ends in '.la', then a libtool library is created, --only library objects ('.lo' files) may be specified, and '-rpath' is -+If the OUTPUT-FILE ends in \`.la', then a libtool library is created, -+only library objects (\`.lo' files) may be specified, and \`-rpath' is - required, except when creating a convenience library. - --If OUTPUT-FILE ends in '.a' or '.lib', then a standard library is created --using 'ar' and 'ranlib', or on Windows using 'lib'. -+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created -+using \`ar' and \`ranlib', or on Windows using \`lib'. - --If OUTPUT-FILE ends in '.lo' or '.$objext', then a reloadable object file -+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file - is created, otherwise an executable program is created." - ;; - -@@ -3782,7 +2465,7 @@ - Remove libraries from an installation directory. - - RM is the name of the program to use to delete files associated with each FILE --(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed -+(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed - to RM. - - If FILE is a libtool library, all the files associated with it are deleted. -@@ -3790,17 +2473,17 @@ - ;; - - *) -- func_fatal_help "invalid operation mode '$opt_mode'" -+ func_fatal_help "invalid operation mode \`$opt_mode'" - ;; - esac - - echo -- $ECHO "Try '$progname --help' for more information about other modes." -+ $ECHO "Try \`$progname --help' for more information about other modes." - } - - # Now that we've collected a possible --mode arg, show help if necessary - if $opt_help; then -- if test : = "$opt_help"; then -+ if test "$opt_help" = :; then - func_mode_help - else - { -@@ -3808,7 +2491,7 @@ - for opt_mode in compile link execute install finish uninstall clean; do - func_mode_help - done -- } | $SED -n '1p; 2,$s/^Usage:/ or: /p' -+ } | sed -n '1p; 2,$s/^Usage:/ or: /p' - { - func_help noexit - for opt_mode in compile link execute install finish uninstall clean; do -@@ -3816,7 +2499,7 @@ - func_mode_help - done - } | -- $SED '1d -+ sed '1d - /^When reporting/,/^Report/{ - H - d -@@ -3833,17 +2516,16 @@ - # func_mode_execute arg... - func_mode_execute () - { -- $debug_cmd -- -+ $opt_debug - # The first argument is the command name. -- cmd=$nonopt -+ cmd="$nonopt" - test -z "$cmd" && \ - func_fatal_help "you must specify a COMMAND" - - # Handle -dlopen flags immediately. - for file in $opt_dlopen; do - test -f "$file" \ -- || func_fatal_help "'$file' is not a file" -+ || func_fatal_help "\`$file' is not a file" - - dir= - case $file in -@@ -3853,7 +2535,7 @@ - - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$file" \ -- || func_fatal_help "'$lib' is not a valid libtool archive" -+ || func_fatal_help "\`$lib' is not a valid libtool archive" - - # Read the libtool library. - dlname= -@@ -3864,18 +2546,18 @@ - if test -z "$dlname"; then - # Warn if it was a shared library. - test -n "$library_names" && \ -- func_warning "'$file' was not linked with '-export-dynamic'" -+ func_warning "\`$file' was not linked with \`-export-dynamic'" - continue - fi - - func_dirname "$file" "" "." -- dir=$func_dirname_result -+ dir="$func_dirname_result" - - if test -f "$dir/$objdir/$dlname"; then - func_append dir "/$objdir" - else - if test ! 
-f "$dir/$dlname"; then -- func_fatal_error "cannot find '$dlname' in '$dir' or '$dir/$objdir'" -+ func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" - fi - fi - ;; -@@ -3883,18 +2565,18 @@ - *.lo) - # Just add the directory containing the .lo file. - func_dirname "$file" "" "." -- dir=$func_dirname_result -+ dir="$func_dirname_result" - ;; - - *) -- func_warning "'-dlopen' is ignored for non-libtool libraries and objects" -+ func_warning "\`-dlopen' is ignored for non-libtool libraries and objects" - continue - ;; - esac - - # Get the absolute pathname. - absdir=`cd "$dir" && pwd` -- test -n "$absdir" && dir=$absdir -+ test -n "$absdir" && dir="$absdir" - - # Now add the directory to shlibpath_var. - if eval "test -z \"\$$shlibpath_var\""; then -@@ -3906,7 +2588,7 @@ - - # This variable tells wrapper scripts just to set shlibpath_var - # rather than running their programs. -- libtool_execute_magic=$magic -+ libtool_execute_magic="$magic" - - # Check if any of the arguments is a wrapper script. - args= -@@ -3919,12 +2601,12 @@ - if func_ltwrapper_script_p "$file"; then - func_source "$file" - # Transform arg to wrapped name. -- file=$progdir/$program -+ file="$progdir/$program" - elif func_ltwrapper_executable_p "$file"; then - func_ltwrapper_scriptname "$file" - func_source "$func_ltwrapper_scriptname_result" - # Transform arg to wrapped name. -- file=$progdir/$program -+ file="$progdir/$program" - fi - ;; - esac -@@ -3932,15 +2614,7 @@ - func_append_quoted args "$file" - done - -- if $opt_dry_run; then -- # Display what would be done. -- if test -n "$shlibpath_var"; then -- eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" -- echo "export $shlibpath_var" -- fi -- $ECHO "$cmd$args" -- exit $EXIT_SUCCESS -- else -+ if test "X$opt_dry_run" = Xfalse; then - if test -n "$shlibpath_var"; then - # Export the shlibpath_var. - eval "export $shlibpath_var" -@@ -3957,18 +2631,25 @@ - done - - # Now prepare to actually exec the command. -- exec_cmd=\$cmd$args -+ exec_cmd="\$cmd$args" -+ else -+ # Display what would be done. -+ if test -n "$shlibpath_var"; then -+ eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" -+ echo "export $shlibpath_var" -+ fi -+ $ECHO "$cmd$args" -+ exit $EXIT_SUCCESS - fi - } - --test execute = "$opt_mode" && func_mode_execute ${1+"$@"} -+test "$opt_mode" = execute && func_mode_execute ${1+"$@"} - - - # func_mode_finish arg... - func_mode_finish () - { -- $debug_cmd -- -+ $opt_debug - libs= - libdirs= - admincmds= -@@ -3982,11 +2663,11 @@ - if func_lalib_unsafe_p "$opt"; then - func_append libs " $opt" - else -- func_warning "'$opt' is not a valid libtool archive" -+ func_warning "\`$opt' is not a valid libtool archive" - fi - - else -- func_fatal_error "invalid argument '$opt'" -+ func_fatal_error "invalid argument \`$opt'" - fi - done - -@@ -4001,12 +2682,12 @@ - # Remove sysroot references - if $opt_dry_run; then - for lib in $libs; do -- echo "removing references to $lt_sysroot and '=' prefixes from $lib" -+ echo "removing references to $lt_sysroot and \`=' prefixes from $lib" - done - else - tmpdir=`func_mktempdir` - for lib in $libs; do -- $SED -e "$sysroot_cmd s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ -+ sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ - > $tmpdir/tmp-la - mv -f $tmpdir/tmp-la $lib - done -@@ -4031,7 +2712,7 @@ - fi - - # Exit here if they wanted silent mode. 
-- $opt_quiet && exit $EXIT_SUCCESS -+ $opt_silent && exit $EXIT_SUCCESS - - if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then - echo "----------------------------------------------------------------------" -@@ -4042,27 +2723,27 @@ - echo - echo "If you ever happen to want to link against installed libraries" - echo "in a given directory, LIBDIR, you must either use libtool, and" -- echo "specify the full pathname of the library, or use the '-LLIBDIR'" -+ echo "specify the full pathname of the library, or use the \`-LLIBDIR'" - echo "flag during linking and do at least one of the following:" - if test -n "$shlibpath_var"; then -- echo " - add LIBDIR to the '$shlibpath_var' environment variable" -+ echo " - add LIBDIR to the \`$shlibpath_var' environment variable" - echo " during execution" - fi - if test -n "$runpath_var"; then -- echo " - add LIBDIR to the '$runpath_var' environment variable" -+ echo " - add LIBDIR to the \`$runpath_var' environment variable" - echo " during linking" - fi - if test -n "$hardcode_libdir_flag_spec"; then - libdir=LIBDIR - eval flag=\"$hardcode_libdir_flag_spec\" - -- $ECHO " - use the '$flag' linker flag" -+ $ECHO " - use the \`$flag' linker flag" - fi - if test -n "$admincmds"; then - $ECHO " - have your system administrator run these commands:$admincmds" - fi - if test -f /etc/ld.so.conf; then -- echo " - have your system administrator add LIBDIR to '/etc/ld.so.conf'" -+ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" - fi - echo - -@@ -4081,20 +2762,18 @@ - exit $EXIT_SUCCESS - } - --test finish = "$opt_mode" && func_mode_finish ${1+"$@"} -+test "$opt_mode" = finish && func_mode_finish ${1+"$@"} - - - # func_mode_install arg... - func_mode_install () - { -- $debug_cmd -- -+ $opt_debug - # There may be an optional sh(1) argument at the beginning of - # install_prog (especially on Windows NT). -- if test "$SHELL" = "$nonopt" || test /bin/sh = "$nonopt" || -+ if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || - # Allow the use of GNU shtool's install command. -- case $nonopt in *shtool*) :;; *) false;; esac -- then -+ case $nonopt in *shtool*) :;; *) false;; esac; then - # Aesthetically quote it. - func_quote_for_eval "$nonopt" - install_prog="$func_quote_for_eval_result " -@@ -4121,7 +2800,7 @@ - opts= - prev= - install_type= -- isdir=false -+ isdir=no - stripme= - no_mode=: - for arg -@@ -4134,7 +2813,7 @@ - fi - - case $arg in -- -d) isdir=: ;; -+ -d) isdir=yes ;; - -f) - if $install_cp; then :; else - prev=$arg -@@ -4152,7 +2831,7 @@ - *) - # If the previous option needed an argument, then skip it. - if test -n "$prev"; then -- if test X-m = "X$prev" && test -n "$install_override_mode"; then -+ if test "x$prev" = x-m && test -n "$install_override_mode"; then - arg2=$install_override_mode - no_mode=false - fi -@@ -4177,7 +2856,7 @@ - func_fatal_help "you must specify an install program" - - test -n "$prev" && \ -- func_fatal_help "the '$prev' option requires an argument" -+ func_fatal_help "the \`$prev' option requires an argument" - - if test -n "$install_override_mode" && $no_mode; then - if $install_cp; then :; else -@@ -4199,19 +2878,19 @@ - dest=$func_stripname_result - - # Check to see that the destination is a directory. -- test -d "$dest" && isdir=: -- if $isdir; then -- destdir=$dest -+ test -d "$dest" && isdir=yes -+ if test "$isdir" = yes; then -+ destdir="$dest" - destname= - else - func_dirname_and_basename "$dest" "" "." 
-- destdir=$func_dirname_result -- destname=$func_basename_result -+ destdir="$func_dirname_result" -+ destname="$func_basename_result" - - # Not a directory, so check to see that there is only one file specified. - set dummy $files; shift - test "$#" -gt 1 && \ -- func_fatal_help "'$dest' is not a directory" -+ func_fatal_help "\`$dest' is not a directory" - fi - case $destdir in - [\\/]* | [A-Za-z]:[\\/]*) ;; -@@ -4220,7 +2899,7 @@ - case $file in - *.lo) ;; - *) -- func_fatal_help "'$destdir' must be an absolute directory name" -+ func_fatal_help "\`$destdir' must be an absolute directory name" - ;; - esac - done -@@ -4229,7 +2908,7 @@ - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. -- libtool_install_magic=$magic -+ libtool_install_magic="$magic" - - staticlibs= - future_libdirs= -@@ -4249,7 +2928,7 @@ - - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$file" \ -- || func_fatal_help "'$file' is not a valid libtool archive" -+ || func_fatal_help "\`$file' is not a valid libtool archive" - - library_names= - old_library= -@@ -4271,7 +2950,7 @@ - fi - - func_dirname "$file" "/" "" -- dir=$func_dirname_result -+ dir="$func_dirname_result" - func_append dir "$objdir" - - if test -n "$relink_command"; then -@@ -4285,7 +2964,7 @@ - # are installed into $libdir/../bin (currently, that works fine) - # but it's something to keep an eye on. - test "$inst_prefix_dir" = "$destdir" && \ -- func_fatal_error "error: cannot install '$file' to a directory not ending in $libdir" -+ func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir" - - if test -n "$inst_prefix_dir"; then - # Stick the inst_prefix_dir data into the link command. -@@ -4294,36 +2973,29 @@ - relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"` - fi - -- func_warning "relinking '$file'" -+ func_warning "relinking \`$file'" - func_show_eval "$relink_command" \ -- 'func_fatal_error "error: relink '\''$file'\'' with the above command before installing it"' -+ 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"' - fi - - # See the names of the shared library. - set dummy $library_names; shift - if test -n "$1"; then -- realname=$1 -+ realname="$1" - shift - -- srcname=$realname -- test -n "$relink_command" && srcname=${realname}T -+ srcname="$realname" -+ test -n "$relink_command" && srcname="$realname"T - - # Install the shared library and build the symlinks. - func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \ - 'exit $?' -- tstripme=$stripme -+ tstripme="$stripme" - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - case $realname in - *.dll.a) -- tstripme= -- ;; -- esac -- ;; -- os2*) -- case $realname in -- *_dll.a) -- tstripme= -+ tstripme="" - ;; - esac - ;; -@@ -4334,7 +3006,7 @@ - - if test "$#" -gt 0; then - # Delete the old symlinks, and create new ones. -- # Try 'ln -sf' first, because the 'ln' binary might depend on -+ # Try `ln -sf' first, because the `ln' binary might depend on - # the symlink we replace! Solaris /bin/ln does not understand -f, - # so we also need to try rm && ln -s. - for linkname -@@ -4345,14 +3017,14 @@ - fi - - # Do each command in the postinstall commands. -- lib=$destdir/$realname -+ lib="$destdir/$realname" - func_execute_cmds "$postinstall_cmds" 'exit $?' - fi - - # Install the pseudo-library for information purposes. 
- func_basename "$file" -- name=$func_basename_result -- instname=$dir/${name}i -+ name="$func_basename_result" -+ instname="$dir/$name"i - func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' - - # Maybe install the static library, too. -@@ -4364,11 +3036,11 @@ - - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then -- destfile=$destdir/$destname -+ destfile="$destdir/$destname" - else - func_basename "$file" -- destfile=$func_basename_result -- destfile=$destdir/$destfile -+ destfile="$func_basename_result" -+ destfile="$destdir/$destfile" - fi - - # Deduce the name of the destination old-style object file. -@@ -4378,11 +3050,11 @@ - staticdest=$func_lo2o_result - ;; - *.$objext) -- staticdest=$destfile -+ staticdest="$destfile" - destfile= - ;; - *) -- func_fatal_help "cannot copy a libtool object to '$destfile'" -+ func_fatal_help "cannot copy a libtool object to \`$destfile'" - ;; - esac - -@@ -4391,7 +3063,7 @@ - func_show_eval "$install_prog $file $destfile" 'exit $?' - - # Install the old object if enabled. -- if test yes = "$build_old_libs"; then -+ if test "$build_old_libs" = yes; then - # Deduce the name of the old-style object file. - func_lo2o "$file" - staticobj=$func_lo2o_result -@@ -4403,23 +3075,23 @@ - *) - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then -- destfile=$destdir/$destname -+ destfile="$destdir/$destname" - else - func_basename "$file" -- destfile=$func_basename_result -- destfile=$destdir/$destfile -+ destfile="$func_basename_result" -+ destfile="$destdir/$destfile" - fi - - # If the file is missing, and there is a .exe on the end, strip it - # because it is most likely a libtool script we actually want to - # install -- stripped_ext= -+ stripped_ext="" - case $file in - *.exe) - if test ! -f "$file"; then - func_stripname '' '.exe' "$file" - file=$func_stripname_result -- stripped_ext=.exe -+ stripped_ext=".exe" - fi - ;; - esac -@@ -4447,19 +3119,19 @@ - - # Check the variables that should have been set. - test -z "$generated_by_libtool_version" && \ -- func_fatal_error "invalid libtool wrapper script '$wrapper'" -+ func_fatal_error "invalid libtool wrapper script \`$wrapper'" - -- finalize=: -+ finalize=yes - for lib in $notinst_deplibs; do - # Check to see that each library is installed. - libdir= - if test -f "$lib"; then - func_source "$lib" - fi -- libfile=$libdir/`$ECHO "$lib" | $SED 's%^.*/%%g'` -+ libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test - if test -n "$libdir" && test ! -f "$libfile"; then -- func_warning "'$lib' has not been installed in '$libdir'" -- finalize=false -+ func_warning "\`$lib' has not been installed in \`$libdir'" -+ finalize=no - fi - done - -@@ -4467,29 +3139,29 @@ - func_source "$wrapper" - - outputname= -- if test no = "$fast_install" && test -n "$relink_command"; then -+ if test "$fast_install" = no && test -n "$relink_command"; then - $opt_dry_run || { -- if $finalize; then -+ if test "$finalize" = yes; then - tmpdir=`func_mktempdir` - func_basename "$file$stripped_ext" -- file=$func_basename_result -- outputname=$tmpdir/$file -+ file="$func_basename_result" -+ outputname="$tmpdir/$file" - # Replace the output file specification. 
- relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'` - -- $opt_quiet || { -+ $opt_silent || { - func_quote_for_expand "$relink_command" - eval "func_echo $func_quote_for_expand_result" - } - if eval "$relink_command"; then : - else -- func_error "error: relink '$file' with the above command before installing it" -+ func_error "error: relink \`$file' with the above command before installing it" - $opt_dry_run || ${RM}r "$tmpdir" - continue - fi -- file=$outputname -+ file="$outputname" - else -- func_warning "cannot relink '$file'" -+ func_warning "cannot relink \`$file'" - fi - } - else -@@ -4526,10 +3198,10 @@ - - for file in $staticlibs; do - func_basename "$file" -- name=$func_basename_result -+ name="$func_basename_result" - - # Set up the ranlib parameters. -- oldlib=$destdir/$name -+ oldlib="$destdir/$name" - func_to_tool_file "$oldlib" func_convert_file_msys_to_w32 - tool_oldlib=$func_to_tool_file_result - -@@ -4544,18 +3216,18 @@ - done - - test -n "$future_libdirs" && \ -- func_warning "remember to run '$progname --finish$future_libdirs'" -+ func_warning "remember to run \`$progname --finish$future_libdirs'" - - if test -n "$current_libdirs"; then - # Maybe just do a dry run. - $opt_dry_run && current_libdirs=" -n$current_libdirs" -- exec_cmd='$SHELL "$progpath" $preserve_args --finish$current_libdirs' -+ exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' - else - exit $EXIT_SUCCESS - fi - } - --test install = "$opt_mode" && func_mode_install ${1+"$@"} -+test "$opt_mode" = install && func_mode_install ${1+"$@"} - - - # func_generate_dlsyms outputname originator pic_p -@@ -4563,17 +3235,16 @@ - # a dlpreopen symbol table. - func_generate_dlsyms () - { -- $debug_cmd -- -- my_outputname=$1 -- my_originator=$2 -- my_pic_p=${3-false} -- my_prefix=`$ECHO "$my_originator" | $SED 's%[^a-zA-Z0-9]%_%g'` -+ $opt_debug -+ my_outputname="$1" -+ my_originator="$2" -+ my_pic_p="${3-no}" -+ my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'` - my_dlsyms= - -- if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then -+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - if test -n "$NM" && test -n "$global_symbol_pipe"; then -- my_dlsyms=${my_outputname}S.c -+ my_dlsyms="${my_outputname}S.c" - else - func_error "not configured to extract global symbols from dlpreopened files" - fi -@@ -4584,7 +3255,7 @@ - "") ;; - *.c) - # Discover the nlist of each of the dlfiles. -- nlist=$output_objdir/$my_outputname.nm -+ nlist="$output_objdir/${my_outputname}.nm" - - func_show_eval "$RM $nlist ${nlist}S ${nlist}T" - -@@ -4592,36 +3263,34 @@ - func_verbose "creating $output_objdir/$my_dlsyms" - - $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\ --/* $my_dlsyms - symbol resolution table for '$my_outputname' dlsym emulation. */ --/* Generated by $PROGRAM (GNU $PACKAGE) $VERSION */ -+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */ -+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */ - - #ifdef __cplusplus - extern \"C\" { - #endif - --#if defined __GNUC__ && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4)) -+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4)) - #pragma GCC diagnostic ignored \"-Wstrict-prototypes\" - #endif - - /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ --#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE --/* DATA imports from DLLs on WIN32 can't be const, because runtime -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. */ - # define LT_DLSYM_CONST --#elif defined __osf__ -+#elif defined(__osf__) - /* This system does not cope well with relocations in const data. */ - # define LT_DLSYM_CONST - #else - # define LT_DLSYM_CONST const - #endif - --#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0) -- - /* External symbol declarations for the compiler. */\ - " - -- if test yes = "$dlself"; then -- func_verbose "generating symbol list for '$output'" -+ if test "$dlself" = yes; then -+ func_verbose "generating symbol list for \`$output'" - - $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist" - -@@ -4629,7 +3298,7 @@ - progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` - for progfile in $progfiles; do - func_to_tool_file "$progfile" func_convert_file_msys_to_w32 -- func_verbose "extracting global C symbols from '$func_to_tool_file_result'" -+ func_verbose "extracting global C symbols from \`$func_to_tool_file_result'" - $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'" - done - -@@ -4649,10 +3318,10 @@ - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then -- export_symbols=$output_objdir/$outputname.exp -+ export_symbols="$output_objdir/$outputname.exp" - $opt_dry_run || { - $RM $export_symbols -- eval "$SED -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' -+ eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' - case $host in - *cygwin* | *mingw* | *cegcc* ) - eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' -@@ -4662,7 +3331,7 @@ - } - else - $opt_dry_run || { -- eval "$SED -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' -+ eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' - eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' - eval '$MV "$nlist"T "$nlist"' - case $host in -@@ -4676,22 +3345,22 @@ - fi - - for dlprefile in $dlprefiles; do -- func_verbose "extracting global C symbols from '$dlprefile'" -+ func_verbose "extracting global C symbols from \`$dlprefile'" - func_basename "$dlprefile" -- name=$func_basename_result -+ name="$func_basename_result" - case $host in - *cygwin* | *mingw* | *cegcc* ) - # if an import library, we need to obtain dlname - if func_win32_import_lib_p "$dlprefile"; then - func_tr_sh "$dlprefile" - eval "curr_lafile=\$libfile_$func_tr_sh_result" -- dlprefile_dlbasename= -+ dlprefile_dlbasename="" - if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then - # Use subshell, to avoid clobbering current variable values - dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"` -- if test -n "$dlprefile_dlname"; then -+ if test -n "$dlprefile_dlname" ; then - func_basename "$dlprefile_dlname" -- dlprefile_dlbasename=$func_basename_result -+ dlprefile_dlbasename="$func_basename_result" - else - # no lafile. user explicitly requested -dlpreopen <import library>. 
- $sharedlib_from_linklib_cmd "$dlprefile" -@@ -4699,7 +3368,7 @@ - fi - fi - $opt_dry_run || { -- if test -n "$dlprefile_dlbasename"; then -+ if test -n "$dlprefile_dlbasename" ; then - eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"' - else - func_warning "Could not compute DLL name from $name" -@@ -4755,11 +3424,6 @@ - echo '/* NONE */' >> "$output_objdir/$my_dlsyms" - fi - -- func_show_eval '$RM "${nlist}I"' -- if test -n "$global_symbol_to_import"; then -- eval "$global_symbol_to_import"' < "$nlist"S > "$nlist"I' -- fi -- - echo >> "$output_objdir/$my_dlsyms" "\ - - /* The mapping between symbol names and symbols. */ -@@ -4768,30 +3432,11 @@ - void *address; - } lt_dlsymlist; - extern LT_DLSYM_CONST lt_dlsymlist --lt_${my_prefix}_LTX_preloaded_symbols[];\ --" -- -- if test -s "$nlist"I; then -- echo >> "$output_objdir/$my_dlsyms" "\ --static void lt_syminit(void) --{ -- LT_DLSYM_CONST lt_dlsymlist *symbol = lt_${my_prefix}_LTX_preloaded_symbols; -- for (; symbol->name; ++symbol) -- {" -- $SED 's/.*/ if (STREQ (symbol->name, \"&\")) symbol->address = (void *) \&&;/' < "$nlist"I >> "$output_objdir/$my_dlsyms" -- echo >> "$output_objdir/$my_dlsyms" "\ -- } --}" -- fi -- echo >> "$output_objdir/$my_dlsyms" "\ -+lt_${my_prefix}_LTX_preloaded_symbols[]; - LT_DLSYM_CONST lt_dlsymlist - lt_${my_prefix}_LTX_preloaded_symbols[] = --{ {\"$my_originator\", (void *) 0}," -- -- if test -s "$nlist"I; then -- echo >> "$output_objdir/$my_dlsyms" "\ -- {\"@INIT@\", (void *) <_syminit}," -- fi -+{\ -+ { \"$my_originator\", (void *) 0 }," - - case $need_lib_prefix in - no) -@@ -4833,7 +3478,9 @@ - *-*-hpux*) - pic_flag_for_symtable=" $pic_flag" ;; - *) -- $my_pic_p && pic_flag_for_symtable=" $pic_flag" -+ if test "X$my_pic_p" != Xno; then -+ pic_flag_for_symtable=" $pic_flag" -+ fi - ;; - esac - ;; -@@ -4850,10 +3497,10 @@ - func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?' - - # Clean up the generated files. -- func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T" "${nlist}I"' -+ func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"' - - # Transform the symbol file into the correct name. -- symfileobj=$output_objdir/${my_outputname}S.$objext -+ symfileobj="$output_objdir/${my_outputname}S.$objext" - case $host in - *cygwin* | *mingw* | *cegcc* ) - if test -f "$output_objdir/$my_outputname.def"; then -@@ -4871,7 +3518,7 @@ - esac - ;; - *) -- func_fatal_error "unknown suffix for '$my_dlsyms'" -+ func_fatal_error "unknown suffix for \`$my_dlsyms'" - ;; - esac - else -@@ -4885,32 +3532,6 @@ - fi - } - --# func_cygming_gnu_implib_p ARG --# This predicate returns with zero status (TRUE) if --# ARG is a GNU/binutils-style import library. Returns --# with nonzero status (FALSE) otherwise. --func_cygming_gnu_implib_p () --{ -- $debug_cmd -- -- func_to_tool_file "$1" func_convert_file_msys_to_w32 -- func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` -- test -n "$func_cygming_gnu_implib_tmp" --} -- --# func_cygming_ms_implib_p ARG --# This predicate returns with zero status (TRUE) if --# ARG is an MS-style import library. Returns --# with nonzero status (FALSE) otherwise. 
--func_cygming_ms_implib_p () --{ -- $debug_cmd -- -- func_to_tool_file "$1" func_convert_file_msys_to_w32 -- func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` -- test -n "$func_cygming_ms_implib_tmp" --} -- - # func_win32_libid arg - # return the library type of file 'arg' - # -@@ -4920,9 +3541,8 @@ - # Despite the name, also deal with 64 bit binaries. - func_win32_libid () - { -- $debug_cmd -- -- win32_libid_type=unknown -+ $opt_debug -+ win32_libid_type="unknown" - win32_fileres=`file -L $1 2>/dev/null` - case $win32_fileres in - *ar\ archive\ import\ library*) # definitely import -@@ -4932,29 +3552,16 @@ - # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD. - if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | - $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then -- case $nm_interface in -- "MS dumpbin") -- if func_cygming_ms_implib_p "$1" || -- func_cygming_gnu_implib_p "$1" -- then -- win32_nmres=import -- else -- win32_nmres= -- fi -- ;; -- *) -- func_to_tool_file "$1" func_convert_file_msys_to_w32 -- win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | -- $SED -n -e ' -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | -+ $SED -n -e ' - 1,100{ - / I /{ -- s|.*|import| -+ s,.*,import, - p - q - } - }'` -- ;; -- esac - case $win32_nmres in - import*) win32_libid_type="x86 archive import";; - *) win32_libid_type="x86 archive static";; -@@ -4986,8 +3593,7 @@ - # $sharedlib_from_linklib_result - func_cygming_dll_for_implib () - { -- $debug_cmd -- -+ $opt_debug - sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"` - } - -@@ -5004,8 +3610,7 @@ - # specified import library. - func_cygming_dll_for_implib_fallback_core () - { -- $debug_cmd -- -+ $opt_debug - match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"` - $OBJDUMP -s --section "$1" "$2" 2>/dev/null | - $SED '/^Contents of section '"$match_literal"':/{ -@@ -5041,8 +3646,8 @@ - /./p' | - # we now have a list, one entry per line, of the stringified - # contents of the appropriate section of all members of the -- # archive that possess that section. Heuristic: eliminate -- # all those that have a first or second character that is -+ # archive which possess that section. Heuristic: eliminate -+ # all those which have a first or second character that is - # a '.' (that is, objdump's representation of an unprintable - # character.) This should work for all archives with less than - # 0x302f exports -- but will fail for DLLs whose name actually -@@ -5053,6 +3658,30 @@ - $SED -e '/^\./d;/^.\./d;q' - } - -+# func_cygming_gnu_implib_p ARG -+# This predicate returns with zero status (TRUE) if -+# ARG is a GNU/binutils-style import library. Returns -+# with nonzero status (FALSE) otherwise. -+func_cygming_gnu_implib_p () -+{ -+ $opt_debug -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` -+ test -n "$func_cygming_gnu_implib_tmp" -+} -+ -+# func_cygming_ms_implib_p ARG -+# This predicate returns with zero status (TRUE) if -+# ARG is an MS-style import library. Returns -+# with nonzero status (FALSE) otherwise. 
-+func_cygming_ms_implib_p () -+{ -+ $opt_debug -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` -+ test -n "$func_cygming_ms_implib_tmp" -+} -+ - # func_cygming_dll_for_implib_fallback ARG - # Platform-specific function to extract the - # name of the DLL associated with the specified -@@ -5066,17 +3695,16 @@ - # $sharedlib_from_linklib_result - func_cygming_dll_for_implib_fallback () - { -- $debug_cmd -- -- if func_cygming_gnu_implib_p "$1"; then -+ $opt_debug -+ if func_cygming_gnu_implib_p "$1" ; then - # binutils import library - sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"` -- elif func_cygming_ms_implib_p "$1"; then -+ elif func_cygming_ms_implib_p "$1" ; then - # ms-generated import library - sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"` - else - # unknown -- sharedlib_from_linklib_result= -+ sharedlib_from_linklib_result="" - fi - } - -@@ -5084,11 +3712,10 @@ - # func_extract_an_archive dir oldlib - func_extract_an_archive () - { -- $debug_cmd -- -- f_ex_an_ar_dir=$1; shift -- f_ex_an_ar_oldlib=$1 -- if test yes = "$lock_old_archive_extraction"; then -+ $opt_debug -+ f_ex_an_ar_dir="$1"; shift -+ f_ex_an_ar_oldlib="$1" -+ if test "$lock_old_archive_extraction" = yes; then - lockfile=$f_ex_an_ar_oldlib.lock - until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do - func_echo "Waiting for $lockfile to be removed" -@@ -5097,7 +3724,7 @@ - fi - func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \ - 'stat=$?; rm -f "$lockfile"; exit $stat' -- if test yes = "$lock_old_archive_extraction"; then -+ if test "$lock_old_archive_extraction" = yes; then - $opt_dry_run || rm -f "$lockfile" - fi - if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then -@@ -5111,23 +3738,22 @@ - # func_extract_archives gentop oldlib ... - func_extract_archives () - { -- $debug_cmd -- -- my_gentop=$1; shift -+ $opt_debug -+ my_gentop="$1"; shift - my_oldlibs=${1+"$@"} -- my_oldobjs= -- my_xlib= -- my_xabs= -- my_xdir= -+ my_oldobjs="" -+ my_xlib="" -+ my_xabs="" -+ my_xdir="" - - for my_xlib in $my_oldlibs; do - # Extract the objects. - case $my_xlib in -- [\\/]* | [A-Za-z]:[\\/]*) my_xabs=$my_xlib ;; -+ [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; - *) my_xabs=`pwd`"/$my_xlib" ;; - esac - func_basename "$my_xlib" -- my_xlib=$func_basename_result -+ my_xlib="$func_basename_result" - my_xlib_u=$my_xlib - while :; do - case " $extracted_archives " in -@@ -5139,7 +3765,7 @@ - esac - done - extracted_archives="$extracted_archives $my_xlib_u" -- my_xdir=$my_gentop/$my_xlib_u -+ my_xdir="$my_gentop/$my_xlib_u" - - func_mkdir_p "$my_xdir" - -@@ -5152,23 +3778,22 @@ - cd $my_xdir || exit $? 
- darwin_archive=$my_xabs - darwin_curdir=`pwd` -- func_basename "$darwin_archive" -- darwin_base_archive=$func_basename_result -+ darwin_base_archive=`basename "$darwin_archive"` - darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true` - if test -n "$darwin_arches"; then - darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'` - darwin_arch= - func_verbose "$darwin_base_archive has multiple architectures $darwin_arches" -- for darwin_arch in $darwin_arches; do -- func_mkdir_p "unfat-$$/$darwin_base_archive-$darwin_arch" -- $LIPO -thin $darwin_arch -output "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" "$darwin_archive" -- cd "unfat-$$/$darwin_base_archive-$darwin_arch" -- func_extract_an_archive "`pwd`" "$darwin_base_archive" -+ for darwin_arch in $darwin_arches ; do -+ func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}" -+ $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" -+ cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" -+ func_extract_an_archive "`pwd`" "${darwin_base_archive}" - cd "$darwin_curdir" -- $RM "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" -+ $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" - done # $darwin_arches - ## Okay now we've a bunch of thin objects, gotta fatten them up :) -- darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$sed_basename" | sort -u` -+ darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u` - darwin_file= - darwin_files= - for darwin_file in $darwin_filelist; do -@@ -5190,7 +3815,7 @@ - my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP` - done - -- func_extract_archives_result=$my_oldobjs -+ func_extract_archives_result="$my_oldobjs" - } - - -@@ -5205,7 +3830,7 @@ - # - # ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR - # variable will take. If 'yes', then the emitted script --# will assume that the directory where it is stored is -+# will assume that the directory in which it is stored is - # the $objdir directory. This is a cygwin/mingw-specific - # behavior. - func_emit_wrapper () -@@ -5216,7 +3841,7 @@ - #! $SHELL - - # $output - temporary wrapper script for $objdir/$outputname --# Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - # - # The $output program cannot be directly executed until all the libtool - # libraries that it depends on are installed. -@@ -5273,9 +3898,9 @@ - - # Very basic option parsing. These options are (a) specific to - # the libtool wrapper, (b) are identical between the wrapper --# /script/ and the wrapper /executable/ that is used only on -+# /script/ and the wrapper /executable/ which is used only on - # windows platforms, and (c) all begin with the string "--lt-" --# (application programs are unlikely to have options that match -+# (application programs are unlikely to have options which match - # this pattern). 
- # - # There are only two supported options: --lt-debug and -@@ -5308,7 +3933,7 @@ - - # Print the debug banner immediately: - if test -n \"\$lt_option_debug\"; then -- echo \"$outputname:$output:\$LINENO: libtool wrapper (GNU $PACKAGE) $VERSION\" 1>&2 -+ echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2 - fi - } - -@@ -5319,7 +3944,7 @@ - lt_dump_args_N=1; - for lt_arg - do -- \$ECHO \"$outputname:$output:\$LINENO: newargv[\$lt_dump_args_N]: \$lt_arg\" -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\" - lt_dump_args_N=\`expr \$lt_dump_args_N + 1\` - done - } -@@ -5333,7 +3958,7 @@ - *-*-mingw | *-*-os2* | *-cegcc*) - $ECHO "\ - if test -n \"\$lt_option_debug\"; then -- \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir\\\\\$program\" 1>&2 -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2 - func_lt_dump_args \${1+\"\$@\"} 1>&2 - fi - exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} -@@ -5343,7 +3968,7 @@ - *) - $ECHO "\ - if test -n \"\$lt_option_debug\"; then -- \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir/\$program\" 1>&2 -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2 - func_lt_dump_args \${1+\"\$@\"} 1>&2 - fi - exec \"\$progdir/\$program\" \${1+\"\$@\"} -@@ -5418,13 +4043,13 @@ - test -n \"\$absdir\" && thisdir=\"\$absdir\" - " - -- if test yes = "$fast_install"; then -+ if test "$fast_install" = yes; then - $ECHO "\ - program=lt-'$outputname'$exeext - progdir=\"\$thisdir/$objdir\" - - if test ! -f \"\$progdir/\$program\" || -- { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | $SED 1q\`; \\ -+ { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ - test \"X\$file\" != \"X\$progdir/\$program\"; }; then - - file=\"\$\$-\$program\" -@@ -5441,7 +4066,7 @@ - if test -n \"\$relink_command\"; then - if relink_command_output=\`eval \$relink_command 2>&1\`; then : - else -- \$ECHO \"\$relink_command_output\" >&2 -+ $ECHO \"\$relink_command_output\" >&2 - $RM \"\$progdir/\$file\" - exit 1 - fi -@@ -5476,7 +4101,7 @@ - fi - - # Export our shlibpath_var if we have one. -- if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then -+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - $ECHO "\ - # Add our own library path to $shlibpath_var - $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" -@@ -5496,7 +4121,7 @@ - fi - else - # The program doesn't exist. -- \$ECHO \"\$0: error: '\$progdir/\$program' does not exist\" 1>&2 -+ \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2 - \$ECHO \"This script is just a wrapper for \$program.\" 1>&2 - \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2 - exit 1 -@@ -5515,7 +4140,7 @@ - cat <<EOF - - /* $cwrappersource - temporary wrapper executable for $objdir/$outputname -- Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+ Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - - The $output program cannot be directly executed until all the libtool - libraries that it depends on are installed. 
-@@ -5550,45 +4175,47 @@ - #include <fcntl.h> - #include <sys/stat.h> - --#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0) -- - /* declarations of non-ANSI functions */ --#if defined __MINGW32__ -+#if defined(__MINGW32__) - # ifdef __STRICT_ANSI__ - int _putenv (const char *); - # endif --#elif defined __CYGWIN__ -+#elif defined(__CYGWIN__) - # ifdef __STRICT_ANSI__ - char *realpath (const char *, char *); - int putenv (char *); - int setenv (const char *, const char *, int); - # endif --/* #elif defined other_platform || defined ... */ -+/* #elif defined (other platforms) ... */ - #endif - - /* portability defines, excluding path handling macros */ --#if defined _MSC_VER -+#if defined(_MSC_VER) - # define setmode _setmode - # define stat _stat - # define chmod _chmod - # define getcwd _getcwd - # define putenv _putenv - # define S_IXUSR _S_IEXEC --#elif defined __MINGW32__ -+# ifndef _INTPTR_T_DEFINED -+# define _INTPTR_T_DEFINED -+# define intptr_t int -+# endif -+#elif defined(__MINGW32__) - # define setmode _setmode - # define stat _stat - # define chmod _chmod - # define getcwd _getcwd - # define putenv _putenv --#elif defined __CYGWIN__ -+#elif defined(__CYGWIN__) - # define HAVE_SETENV - # define FOPEN_WB "wb" --/* #elif defined other platforms ... */ -+/* #elif defined (other platforms) ... */ - #endif - --#if defined PATH_MAX -+#if defined(PATH_MAX) - # define LT_PATHMAX PATH_MAX --#elif defined MAXPATHLEN -+#elif defined(MAXPATHLEN) - # define LT_PATHMAX MAXPATHLEN - #else - # define LT_PATHMAX 1024 -@@ -5607,8 +4234,8 @@ - # define PATH_SEPARATOR ':' - #endif - --#if defined _WIN32 || defined __MSDOS__ || defined __DJGPP__ || \ -- defined __OS2__ -+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \ -+ defined (__OS2__) - # define HAVE_DOS_BASED_FILE_SYSTEM - # define FOPEN_WB "wb" - # ifndef DIR_SEPARATOR_2 -@@ -5641,10 +4268,10 @@ - - #define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) - #define XFREE(stale) do { \ -- if (stale) { free (stale); stale = 0; } \ -+ if (stale) { free ((void *) stale); stale = 0; } \ - } while (0) - --#if defined LT_DEBUGWRAPPER -+#if defined(LT_DEBUGWRAPPER) - static int lt_debug = 1; - #else - static int lt_debug = 0; -@@ -5673,16 +4300,11 @@ - EOF - - cat <<EOF --#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 5) --# define externally_visible volatile --#else --# define externally_visible __attribute__((externally_visible)) volatile --#endif --externally_visible const char * MAGIC_EXE = "$magic_exe"; -+volatile const char * MAGIC_EXE = "$magic_exe"; - const char * LIB_PATH_VARNAME = "$shlibpath_var"; - EOF - -- if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then -+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - func_to_host_path "$temp_rpath" - cat <<EOF - const char * LIB_PATH_VALUE = "$func_to_host_path_result"; -@@ -5706,7 +4328,7 @@ - EOF - fi - -- if test yes = "$fast_install"; then -+ if test "$fast_install" = yes; then - cat <<EOF - const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */ - EOF -@@ -5735,12 +4357,12 @@ - char *actual_cwrapper_name; - char *target_name; - char *lt_argv_zero; -- int rval = 127; -+ intptr_t rval = 127; - - int i; - - program_name = (char *) xstrdup (base_name (argv[0])); -- newargz = XMALLOC (char *, (size_t) argc + 1); -+ newargz = XMALLOC (char *, argc + 1); - - /* very simple arg parsing; don't want to rely on getopt - * also, copy 
all non cwrapper options to newargz, except -@@ -5749,10 +4371,10 @@ - newargc=0; - for (i = 1; i < argc; i++) - { -- if (STREQ (argv[i], dumpscript_opt)) -+ if (strcmp (argv[i], dumpscript_opt) == 0) - { - EOF -- case $host in -+ case "$host" in - *mingw* | *cygwin* ) - # make stdout use "unix" line endings - echo " setmode(1,_O_BINARY);" -@@ -5763,12 +4385,12 @@ - lt_dump_script (stdout); - return 0; - } -- if (STREQ (argv[i], debug_opt)) -+ if (strcmp (argv[i], debug_opt) == 0) - { - lt_debug = 1; - continue; - } -- if (STREQ (argv[i], ltwrapper_option_prefix)) -+ if (strcmp (argv[i], ltwrapper_option_prefix) == 0) - { - /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX - namespace, but it is not one of the ones we know about and -@@ -5791,7 +4413,7 @@ - EOF - cat <<EOF - /* The GNU banner must be the first non-error debug message */ -- lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE) $VERSION\n"); -+ lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n"); - EOF - cat <<"EOF" - lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]); -@@ -5902,7 +4524,7 @@ - cat <<"EOF" - /* execv doesn't actually work on mingw as expected on unix */ - newargz = prepare_spawn (newargz); -- rval = (int) _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz); -+ rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz); - if (rval == -1) - { - /* failed to start process */ -@@ -5947,7 +4569,7 @@ - { - const char *base; - --#if defined HAVE_DOS_BASED_FILE_SYSTEM -+#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - /* Skip over the disk name in MSDOS pathnames. */ - if (isalpha ((unsigned char) name[0]) && name[1] == ':') - name += 2; -@@ -6006,7 +4628,7 @@ - const char *p_next; - /* static buffer for getcwd */ - char tmp[LT_PATHMAX + 1]; -- size_t tmp_len; -+ int tmp_len; - char *concat_name; - - lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n", -@@ -6016,7 +4638,7 @@ - return NULL; - - /* Absolute path? */ --#if defined HAVE_DOS_BASED_FILE_SYSTEM -+#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':') - { - concat_name = xstrdup (wrapper); -@@ -6034,7 +4656,7 @@ - return concat_name; - XFREE (concat_name); - } --#if defined HAVE_DOS_BASED_FILE_SYSTEM -+#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - } - #endif - -@@ -6057,7 +4679,7 @@ - for (q = p; *q; q++) - if (IS_PATH_SEPARATOR (*q)) - break; -- p_len = (size_t) (q - p); -+ p_len = q - p; - p_next = (*q == '\0' ? 
q : q + 1); - if (p_len == 0) - { -@@ -6176,7 +4798,7 @@ - if (patlen <= len) - { - str += len - patlen; -- if (STREQ (str, pat)) -+ if (strcmp (str, pat) == 0) - *str = '\0'; - } - return str; -@@ -6241,7 +4863,7 @@ - char *str = xstrdup (value); - setenv (name, str, 1); - #else -- size_t len = strlen (name) + 1 + strlen (value) + 1; -+ int len = strlen (name) + 1 + strlen (value) + 1; - char *str = XMALLOC (char, len); - sprintf (str, "%s=%s", name, value); - if (putenv (str) != EXIT_SUCCESS) -@@ -6258,8 +4880,8 @@ - char *new_value; - if (orig_value && *orig_value) - { -- size_t orig_value_len = strlen (orig_value); -- size_t add_len = strlen (add); -+ int orig_value_len = strlen (orig_value); -+ int add_len = strlen (add); - new_value = XMALLOC (char, add_len + orig_value_len + 1); - if (to_end) - { -@@ -6290,10 +4912,10 @@ - { - char *new_value = lt_extend_str (getenv (name), value, 0); - /* some systems can't cope with a ':'-terminated path #' */ -- size_t len = strlen (new_value); -- while ((len > 0) && IS_PATH_SEPARATOR (new_value[len-1])) -+ int len = strlen (new_value); -+ while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1])) - { -- new_value[--len] = '\0'; -+ new_value[len-1] = '\0'; - } - lt_setenv (name, new_value); - XFREE (new_value); -@@ -6460,47 +5082,27 @@ - # True if ARG is an import lib, as indicated by $file_magic_cmd - func_win32_import_lib_p () - { -- $debug_cmd -- -+ $opt_debug - case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in - *import*) : ;; - *) false ;; - esac - } - --# func_suncc_cstd_abi --# !!ONLY CALL THIS FOR SUN CC AFTER $compile_command IS FULLY EXPANDED!! --# Several compiler flags select an ABI that is incompatible with the --# Cstd library. Avoid specifying it if any are in CXXFLAGS. --func_suncc_cstd_abi () --{ -- $debug_cmd -- -- case " $compile_command " in -- *" -compat=g "*|*\ -std=c++[0-9][0-9]\ *|*" -library=stdcxx4 "*|*" -library=stlport4 "*) -- suncc_use_cstd_abi=no -- ;; -- *) -- suncc_use_cstd_abi=yes -- ;; -- esac --} -- - # func_mode_link arg... - func_mode_link () - { -- $debug_cmd -- -+ $opt_debug - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) - # It is impossible to link a dll without this setting, and - # we shouldn't force the makefile maintainer to figure out -- # what system we are compiling for in order to pass an extra -+ # which system we are compiling for in order to pass an extra - # flag for every libtool invocation. - # allow_undefined=no - - # FIXME: Unfortunately, there are problems with the above when trying -- # to make a dll that has undefined symbols, in which case not -+ # to make a dll which has undefined symbols, in which case not - # even a static library is built. For now, we need to specify - # -no-undefined on the libtool link line when we can be certain - # that all symbols are satisfied, otherwise we get a static library. -@@ -6544,11 +5146,10 @@ - module=no - no_install=no - objs= -- os2dllname= - non_pic_objects= - precious_files_regex= - prefer_static_libs=no -- preload=false -+ preload=no - prev= - prevarg= - release= -@@ -6560,7 +5161,7 @@ - vinfo= - vinfo_number=no - weak_libs= -- single_module=$wl-single_module -+ single_module="${wl}-single_module" - func_infer_tag $base_compile - - # We need to know -static, to get the right output filenames. 
-@@ -6568,15 +5169,15 @@ - do - case $arg in - -shared) -- test yes != "$build_libtool_libs" \ -- && func_fatal_configuration "cannot build a shared library" -+ test "$build_libtool_libs" != yes && \ -+ func_fatal_configuration "can not build a shared library" - build_old_libs=no - break - ;; - -all-static | -static | -static-libtool-libs) - case $arg in - -all-static) -- if test yes = "$build_libtool_libs" && test -z "$link_static_flag"; then -+ if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then - func_warning "complete static linking is impossible in this configuration" - fi - if test -n "$link_static_flag"; then -@@ -6609,7 +5210,7 @@ - - # Go through the arguments, transforming them on the way. - while test "$#" -gt 0; do -- arg=$1 -+ arg="$1" - shift - func_quote_for_eval "$arg" - qarg=$func_quote_for_eval_unquoted_result -@@ -6626,21 +5227,21 @@ - - case $prev in - bindir) -- bindir=$arg -+ bindir="$arg" - prev= - continue - ;; - dlfiles|dlprefiles) -- $preload || { -+ if test "$preload" = no; then - # Add the symbol object into the linking commands. - func_append compile_command " @SYMFILE@" - func_append finalize_command " @SYMFILE@" -- preload=: -- } -+ preload=yes -+ fi - case $arg in - *.la | *.lo) ;; # We handle these cases below. - force) -- if test no = "$dlself"; then -+ if test "$dlself" = no; then - dlself=needless - export_dynamic=yes - fi -@@ -6648,9 +5249,9 @@ - continue - ;; - self) -- if test dlprefiles = "$prev"; then -+ if test "$prev" = dlprefiles; then - dlself=yes -- elif test dlfiles = "$prev" && test yes != "$dlopen_self"; then -+ elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then - dlself=yes - else - dlself=needless -@@ -6660,7 +5261,7 @@ - continue - ;; - *) -- if test dlfiles = "$prev"; then -+ if test "$prev" = dlfiles; then - func_append dlfiles " $arg" - else - func_append dlprefiles " $arg" -@@ -6671,14 +5272,14 @@ - esac - ;; - expsyms) -- export_symbols=$arg -+ export_symbols="$arg" - test -f "$arg" \ -- || func_fatal_error "symbol file '$arg' does not exist" -+ || func_fatal_error "symbol file \`$arg' does not exist" - prev= - continue - ;; - expsyms_regex) -- export_symbols_regex=$arg -+ export_symbols_regex="$arg" - prev= - continue - ;; -@@ -6696,13 +5297,7 @@ - continue - ;; - inst_prefix) -- inst_prefix_dir=$arg -- prev= -- continue -- ;; -- mllvm) -- # Clang does not use LLVM to link, so we can simply discard any -- # '-mllvm $arg' options when doing the link step. -+ inst_prefix_dir="$arg" - prev= - continue - ;; -@@ -6726,21 +5321,21 @@ - - if test -z "$pic_object" || - test -z "$non_pic_object" || -- test none = "$pic_object" && -- test none = "$non_pic_object"; then -- func_fatal_error "cannot find name of object for '$arg'" -+ test "$pic_object" = none && -+ test "$non_pic_object" = none; then -+ func_fatal_error "cannot find name of object for \`$arg'" - fi - - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - -- if test none != "$pic_object"; then -+ if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. 
-- pic_object=$xdir$pic_object -+ pic_object="$xdir$pic_object" - -- if test dlfiles = "$prev"; then -- if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then -+ if test "$prev" = dlfiles; then -+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - func_append dlfiles " $pic_object" - prev= - continue -@@ -6751,7 +5346,7 @@ - fi - - # CHECK ME: I think I busted this. -Ossama -- if test dlprefiles = "$prev"; then -+ if test "$prev" = dlprefiles; then - # Preload the old-style object. - func_append dlprefiles " $pic_object" - prev= -@@ -6759,23 +5354,23 @@ - - # A PIC object. - func_append libobjs " $pic_object" -- arg=$pic_object -+ arg="$pic_object" - fi - - # Non-PIC object. -- if test none != "$non_pic_object"; then -+ if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. -- non_pic_object=$xdir$non_pic_object -+ non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - func_append non_pic_objects " $non_pic_object" -- if test -z "$pic_object" || test none = "$pic_object"; then -- arg=$non_pic_object -+ if test -z "$pic_object" || test "$pic_object" = none ; then -+ arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. -- non_pic_object=$pic_object -+ non_pic_object="$pic_object" - func_append non_pic_objects " $non_pic_object" - fi - else -@@ -6783,7 +5378,7 @@ - if $opt_dry_run; then - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - - func_lo2o "$arg" - pic_object=$xdir$objdir/$func_lo2o_result -@@ -6791,29 +5386,24 @@ - func_append libobjs " $pic_object" - func_append non_pic_objects " $non_pic_object" - else -- func_fatal_error "'$arg' is not a valid libtool object" -+ func_fatal_error "\`$arg' is not a valid libtool object" - fi - fi - done - else -- func_fatal_error "link input file '$arg' does not exist" -+ func_fatal_error "link input file \`$arg' does not exist" - fi - arg=$save_arg - prev= - continue - ;; -- os2dllname) -- os2dllname=$arg -- prev= -- continue -- ;; - precious_regex) -- precious_files_regex=$arg -+ precious_files_regex="$arg" - prev= - continue - ;; - release) -- release=-$arg -+ release="-$arg" - prev= - continue - ;; -@@ -6825,7 +5415,7 @@ - func_fatal_error "only absolute run-paths are allowed" - ;; - esac -- if test rpath = "$prev"; then -+ if test "$prev" = rpath; then - case "$rpath " in - *" $arg "*) ;; - *) func_append rpath " $arg" ;; -@@ -6840,7 +5430,7 @@ - continue - ;; - shrext) -- shrext_cmds=$arg -+ shrext_cmds="$arg" - prev= - continue - ;; -@@ -6880,7 +5470,7 @@ - esac - fi # test -n "$prev" - -- prevarg=$arg -+ prevarg="$arg" - - case $arg in - -all-static) -@@ -6894,7 +5484,7 @@ - - -allow-undefined) - # FIXME: remove this flag sometime in the future. 
-- func_fatal_error "'-allow-undefined' must not be used because it is the default" -+ func_fatal_error "\`-allow-undefined' must not be used because it is the default" - ;; - - -avoid-version) -@@ -6926,7 +5516,7 @@ - if test -n "$export_symbols" || test -n "$export_symbols_regex"; then - func_fatal_error "more than one -exported-symbols argument is not allowed" - fi -- if test X-export-symbols = "X$arg"; then -+ if test "X$arg" = "X-export-symbols"; then - prev=expsyms - else - prev=expsyms_regex -@@ -6960,9 +5550,9 @@ - func_stripname "-L" '' "$arg" - if test -z "$func_stripname_result"; then - if test "$#" -gt 0; then -- func_fatal_error "require no space between '-L' and '$1'" -+ func_fatal_error "require no space between \`-L' and \`$1'" - else -- func_fatal_error "need path for '-L' option" -+ func_fatal_error "need path for \`-L' option" - fi - fi - func_resolve_sysroot "$func_stripname_result" -@@ -6973,8 +5563,8 @@ - *) - absdir=`cd "$dir" && pwd` - test -z "$absdir" && \ -- func_fatal_error "cannot determine absolute directory name of '$dir'" -- dir=$absdir -+ func_fatal_error "cannot determine absolute directory name of \`$dir'" -+ dir="$absdir" - ;; - esac - case "$deplibs " in -@@ -7009,7 +5599,7 @@ - ;; - - -l*) -- if test X-lc = "X$arg" || test X-lm = "X$arg"; then -+ if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*) - # These systems don't actually have a C or math library (as such) -@@ -7017,11 +5607,11 @@ - ;; - *-*-os2*) - # These systems don't actually have a C library (as such) -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; -- *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*) -+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc due to us having libc/libc_r. -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C and math libraries are in the System framework -@@ -7030,16 +5620,16 @@ - ;; - *-*-sco3.2v5* | *-*-sco5v6*) - # Causes problems with __ctype -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; - *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) - # Compiler inserts libc in the correct place for threads to work -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; - esac -- elif test X-lc_r = "X$arg"; then -+ elif test "X$arg" = "X-lc_r"; then - case $host in -- *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*) -+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc_r directly, use -pthread flag. - continue - ;; -@@ -7049,11 +5639,6 @@ - continue - ;; - -- -mllvm) -- prev=mllvm -- continue -- ;; -- - -module) - module=yes - continue -@@ -7083,7 +5668,7 @@ - ;; - - -multi_module) -- single_module=$wl-multi_module -+ single_module="${wl}-multi_module" - continue - ;; - -@@ -7097,8 +5682,8 @@ - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*) - # The PATH hackery in wrapper scripts is required on Windows - # and Darwin in order for the loader to find any dlls it needs. 
-- func_warning "'-no-install' is ignored for $host" -- func_warning "assuming '-no-fast-install' instead" -+ func_warning "\`-no-install' is ignored for $host" -+ func_warning "assuming \`-no-fast-install' instead" - fast_install=no - ;; - *) no_install=yes ;; -@@ -7116,11 +5701,6 @@ - continue - ;; - -- -os2dllname) -- prev=os2dllname -- continue -- ;; -- - -o) prev=output ;; - - -precious-files-regex) -@@ -7208,14 +5788,14 @@ - func_stripname '-Wc,' '' "$arg" - args=$func_stripname_result - arg= -- save_ifs=$IFS; IFS=, -+ save_ifs="$IFS"; IFS=',' - for flag in $args; do -- IFS=$save_ifs -+ IFS="$save_ifs" - func_quote_for_eval "$flag" - func_append arg " $func_quote_for_eval_result" - func_append compiler_flags " $func_quote_for_eval_result" - done -- IFS=$save_ifs -+ IFS="$save_ifs" - func_stripname ' ' '' "$arg" - arg=$func_stripname_result - ;; -@@ -7224,15 +5804,15 @@ - func_stripname '-Wl,' '' "$arg" - args=$func_stripname_result - arg= -- save_ifs=$IFS; IFS=, -+ save_ifs="$IFS"; IFS=',' - for flag in $args; do -- IFS=$save_ifs -+ IFS="$save_ifs" - func_quote_for_eval "$flag" - func_append arg " $wl$func_quote_for_eval_result" - func_append compiler_flags " $wl$func_quote_for_eval_result" - func_append linker_flags " $func_quote_for_eval_result" - done -- IFS=$save_ifs -+ IFS="$save_ifs" - func_stripname ' ' '' "$arg" - arg=$func_stripname_result - ;; -@@ -7255,7 +5835,7 @@ - # -msg_* for osf cc - -msg_*) - func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - ;; - - # Flags to be passed through unchanged, with rationale: -@@ -7267,46 +5847,25 @@ - # -m*, -t[45]*, -txscale* architecture-specific flags for GCC - # -F/path path to uninstalled frameworks, gcc on darwin - # -p, -pg, --coverage, -fprofile-* profiling flags for GCC -- # -fstack-protector* stack protector flags for GCC - # @file GCC response files - # -tp=* Portland pgcc target processor selection - # --sysroot=* for sysroot support -- # -O*, -g*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization -- # -stdlib=* select c++ std lib with clang -+ # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization - -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ - -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \ -- -O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-stdlib=*) -+ -O*|-flto*|-fwhopr*|-fuse-linker-plugin) - func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - func_append compile_command " $arg" - func_append finalize_command " $arg" - func_append compiler_flags " $arg" - continue - ;; - -- -Z*) -- if test os2 = "`expr $host : '.*\(os2\)'`"; then -- # OS/2 uses -Zxxx to specify OS/2-specific options -- compiler_flags="$compiler_flags $arg" -- func_append compile_command " $arg" -- func_append finalize_command " $arg" -- case $arg in -- -Zlinker | -Zstack) -- prev=xcompiler -- ;; -- esac -- continue -- else -- # Otherwise treat like 'Some other compiler flag' below -- func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -- fi -- ;; -- - # Some other compiler flag. 
- -* | +*) - func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - ;; - - *.$objext) -@@ -7327,21 +5886,21 @@ - - if test -z "$pic_object" || - test -z "$non_pic_object" || -- test none = "$pic_object" && -- test none = "$non_pic_object"; then -- func_fatal_error "cannot find name of object for '$arg'" -+ test "$pic_object" = none && -+ test "$non_pic_object" = none; then -+ func_fatal_error "cannot find name of object for \`$arg'" - fi - - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - -- test none = "$pic_object" || { -+ if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. -- pic_object=$xdir$pic_object -+ pic_object="$xdir$pic_object" - -- if test dlfiles = "$prev"; then -- if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then -+ if test "$prev" = dlfiles; then -+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - func_append dlfiles " $pic_object" - prev= - continue -@@ -7352,7 +5911,7 @@ - fi - - # CHECK ME: I think I busted this. -Ossama -- if test dlprefiles = "$prev"; then -+ if test "$prev" = dlprefiles; then - # Preload the old-style object. - func_append dlprefiles " $pic_object" - prev= -@@ -7360,23 +5919,23 @@ - - # A PIC object. - func_append libobjs " $pic_object" -- arg=$pic_object -- } -+ arg="$pic_object" -+ fi - - # Non-PIC object. -- if test none != "$non_pic_object"; then -+ if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. -- non_pic_object=$xdir$non_pic_object -+ non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - func_append non_pic_objects " $non_pic_object" -- if test -z "$pic_object" || test none = "$pic_object"; then -- arg=$non_pic_object -+ if test -z "$pic_object" || test "$pic_object" = none ; then -+ arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. -- non_pic_object=$pic_object -+ non_pic_object="$pic_object" - func_append non_pic_objects " $non_pic_object" - fi - else -@@ -7384,7 +5943,7 @@ - if $opt_dry_run; then - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - - func_lo2o "$arg" - pic_object=$xdir$objdir/$func_lo2o_result -@@ -7392,7 +5951,7 @@ - func_append libobjs " $pic_object" - func_append non_pic_objects " $non_pic_object" - else -- func_fatal_error "'$arg' is not a valid libtool object" -+ func_fatal_error "\`$arg' is not a valid libtool object" - fi - fi - ;; -@@ -7408,11 +5967,11 @@ - # A libtool-controlled library. - - func_resolve_sysroot "$arg" -- if test dlfiles = "$prev"; then -+ if test "$prev" = dlfiles; then - # This library was specified with -dlopen. - func_append dlfiles " $func_resolve_sysroot_result" - prev= -- elif test dlprefiles = "$prev"; then -+ elif test "$prev" = dlprefiles; then - # The library was specified with -dlpreopen. - func_append dlprefiles " $func_resolve_sysroot_result" - prev= -@@ -7427,7 +5986,7 @@ - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. 
- func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - ;; - esac # arg - -@@ -7439,9 +5998,9 @@ - done # argument parsing loop - - test -n "$prev" && \ -- func_fatal_help "the '$prevarg' option requires an argument" -+ func_fatal_help "the \`$prevarg' option requires an argument" - -- if test yes = "$export_dynamic" && test -n "$export_dynamic_flag_spec"; then -+ if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then - eval arg=\"$export_dynamic_flag_spec\" - func_append compile_command " $arg" - func_append finalize_command " $arg" -@@ -7450,23 +6009,20 @@ - oldlibs= - # calculate the name of the file, without its directory - func_basename "$output" -- outputname=$func_basename_result -- libobjs_save=$libobjs -+ outputname="$func_basename_result" -+ libobjs_save="$libobjs" - - if test -n "$shlibpath_var"; then - # get the directories listed in $shlibpath_var -- eval shlib_search_path=\`\$ECHO \"\$$shlibpath_var\" \| \$SED \'s/:/ /g\'\` -+ eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\` - else - shlib_search_path= - fi - eval sys_lib_search_path=\"$sys_lib_search_path_spec\" - eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" - -- # Definition is injected by LT_CONFIG during libtool generation. -- func_munge_path_list sys_lib_dlsearch_path "$LT_SYS_LIBRARY_PATH" -- - func_dirname "$output" "/" "" -- output_objdir=$func_dirname_result$objdir -+ output_objdir="$func_dirname_result$objdir" - func_to_tool_file "$output_objdir/" - tool_output_objdir=$func_to_tool_file_result - # Create the object directory. -@@ -7489,7 +6045,7 @@ - # Find all interdependent deplibs by searching for libraries - # that are linked more than once (e.g. -la -lb -la) - for deplib in $deplibs; do -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$libs " in - *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac -@@ -7497,7 +6053,7 @@ - func_append libs " $deplib" - done - -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - libs="$predeps $libs $compiler_lib_search_path $postdeps" - - # Compute libraries that are listed more than once in $predeps -@@ -7529,7 +6085,7 @@ - case $file in - *.la) ;; - *) -- func_fatal_help "libraries can '-dlopen' only libtool libraries: $file" -+ func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file" - ;; - esac - done -@@ -7537,7 +6093,7 @@ - prog) - compile_deplibs= - finalize_deplibs= -- alldeplibs=false -+ alldeplibs=no - newdlfiles= - newdlprefiles= - passes="conv scan dlopen dlpreopen link" -@@ -7549,29 +6105,29 @@ - for pass in $passes; do - # The preopen pass in lib mode reverses $deplibs; put it back here - # so that -L comes before libs that need it for instance... 
-- if test lib,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "lib,link"; then - ## FIXME: Find the place where the list is rebuilt in the wrong - ## order, and fix it there properly - tmp_deplibs= - for deplib in $deplibs; do - tmp_deplibs="$deplib $tmp_deplibs" - done -- deplibs=$tmp_deplibs -+ deplibs="$tmp_deplibs" - fi - -- if test lib,link = "$linkmode,$pass" || -- test prog,scan = "$linkmode,$pass"; then -- libs=$deplibs -+ if test "$linkmode,$pass" = "lib,link" || -+ test "$linkmode,$pass" = "prog,scan"; then -+ libs="$deplibs" - deplibs= - fi -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - case $pass in -- dlopen) libs=$dlfiles ;; -- dlpreopen) libs=$dlprefiles ;; -+ dlopen) libs="$dlfiles" ;; -+ dlpreopen) libs="$dlprefiles" ;; - link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; - esac - fi -- if test lib,dlpreopen = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "lib,dlpreopen"; then - # Collect and forward deplibs of preopened libtool libs - for lib in $dlprefiles; do - # Ignore non-libtool-libs -@@ -7592,26 +6148,26 @@ - esac - done - done -- libs=$dlprefiles -+ libs="$dlprefiles" - fi -- if test dlopen = "$pass"; then -+ if test "$pass" = dlopen; then - # Collect dlpreopened libraries -- save_deplibs=$deplibs -+ save_deplibs="$deplibs" - deplibs= - fi - - for deplib in $libs; do - lib= -- found=false -+ found=no - case $deplib in - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \ - |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*) -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - func_append compiler_flags " $deplib" -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; - * ) func_append new_inherited_linker_flags " $deplib" ;; -@@ -7621,13 +6177,13 @@ - continue - ;; - -l*) -- if test lib != "$linkmode" && test prog != "$linkmode"; then -- func_warning "'-l' is ignored for archives/objects" -+ if test "$linkmode" != lib && test "$linkmode" != prog; then -+ func_warning "\`-l' is ignored for archives/objects" - continue - fi - func_stripname '-l' '' "$deplib" - name=$func_stripname_result -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" - else - searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" -@@ -7635,22 +6191,31 @@ - for searchdir in $searchdirs; do - for search_ext in .la $std_shrext .so .a; do - # Search the libtool library -- lib=$searchdir/lib$name$search_ext -+ lib="$searchdir/lib${name}${search_ext}" - if test -f "$lib"; then -- if test .la = "$search_ext"; then -- found=: -+ if test "$search_ext" = ".la"; then -+ found=yes - else -- found=false -+ found=no - fi - break 2 - fi - done - done -- if $found; then -- # deplib is a libtool library -+ if test "$found" != yes; then -+ # deplib doesn't seem to be a libtool library -+ if test "$linkmode,$pass" = "prog,link"; then -+ compile_deplibs="$deplib $compile_deplibs" -+ finalize_deplibs="$deplib $finalize_deplibs" -+ else -+ deplibs="$deplib $deplibs" -+ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" -+ fi -+ continue -+ else # deplib is a libtool library - # If $allow_libtool_libs_with_static_runtimes && $deplib is a 
stdlib, - # We need to do some special things here, and not later. -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $deplib "*) - if func_lalib_p "$lib"; then -@@ -7658,19 +6223,19 @@ - old_library= - func_source "$lib" - for l in $old_library $library_names; do -- ll=$l -+ ll="$l" - done -- if test "X$ll" = "X$old_library"; then # only static version available -- found=false -+ if test "X$ll" = "X$old_library" ; then # only static version available -+ found=no - func_dirname "$lib" "" "." -- ladir=$func_dirname_result -+ ladir="$func_dirname_result" - lib=$ladir/$old_library -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" -- test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs" -+ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - fi -@@ -7679,25 +6244,15 @@ - *) ;; - esac - fi -- else -- # deplib doesn't seem to be a libtool library -- if test prog,link = "$linkmode,$pass"; then -- compile_deplibs="$deplib $compile_deplibs" -- finalize_deplibs="$deplib $finalize_deplibs" -- else -- deplibs="$deplib $deplibs" -- test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs" -- fi -- continue - fi - ;; # -l - *.ltframework) -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; - * ) func_append new_inherited_linker_flags " $deplib" ;; -@@ -7710,18 +6265,18 @@ - case $linkmode in - lib) - deplibs="$deplib $deplibs" -- test conv = "$pass" && continue -+ test "$pass" = conv && continue - newdependency_libs="$deplib $newdependency_libs" - func_stripname '-L' '' "$deplib" - func_resolve_sysroot "$func_stripname_result" - func_append newlib_search_path " $func_resolve_sysroot_result" - ;; - prog) -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi -- if test scan = "$pass"; then -+ if test "$pass" = scan; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" -@@ -7732,13 +6287,13 @@ - func_append newlib_search_path " $func_resolve_sysroot_result" - ;; - *) -- func_warning "'-L' is ignored for archives/objects" -+ func_warning "\`-L' is ignored for archives/objects" - ;; - esac # linkmode - continue - ;; # -L - -R*) -- if test link = "$pass"; then -+ if test "$pass" = link; then - func_stripname '-R' '' "$deplib" - func_resolve_sysroot "$func_stripname_result" - dir=$func_resolve_sysroot_result -@@ -7756,7 +6311,7 @@ - lib=$func_resolve_sysroot_result - ;; - *.$libext) -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi -@@ -7767,26 +6322,21 @@ - case " $dlpreconveniencelibs " in - *" $deplib "*) ;; - *) -- valid_a_lib=false -+ valid_a_lib=no - case $deplibs_check_method in - match_pattern*) - set dummy $deplibs_check_method; shift - match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` - if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \ - | $EGREP "$match_pattern_regex" > 
/dev/null; then -- valid_a_lib=: -+ valid_a_lib=yes - fi - ;; - pass_all) -- valid_a_lib=: -+ valid_a_lib=yes - ;; - esac -- if $valid_a_lib; then -- echo -- $ECHO "*** Warning: Linking the shared library $output against the" -- $ECHO "*** static library $deplib is not portable!" -- deplibs="$deplib $deplibs" -- else -+ if test "$valid_a_lib" != yes; then - echo - $ECHO "*** Warning: Trying to link with static lib archive $deplib." - echo "*** I have the capability to make that library automatically link in when" -@@ -7794,13 +6344,18 @@ - echo "*** shared version of the library, which you do not appear to have" - echo "*** because the file extensions .$libext of this argument makes me believe" - echo "*** that it is just a static archive that I should not use here." -+ else -+ echo -+ $ECHO "*** Warning: Linking the shared library $output against the" -+ $ECHO "*** static library $deplib is not portable!" -+ deplibs="$deplib $deplibs" - fi - ;; - esac - continue - ;; - prog) -- if test link != "$pass"; then -+ if test "$pass" != link; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" -@@ -7811,10 +6366,10 @@ - esac # linkmode - ;; # *.$libext - *.lo | *.$objext) -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - deplibs="$deplib $deplibs" -- elif test prog = "$linkmode"; then -- if test dlpreopen = "$pass" || test yes != "$dlopen_support" || test no = "$build_libtool_libs"; then -+ elif test "$linkmode" = prog; then -+ if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then - # If there is no dlopen support or we're linking statically, - # we need to preload. - func_append newdlprefiles " $deplib" -@@ -7827,20 +6382,22 @@ - continue - ;; - %DEPLIBS%) -- alldeplibs=: -+ alldeplibs=yes - continue - ;; - esac # case $deplib - -- $found || test -f "$lib" \ -- || func_fatal_error "cannot find the library '$lib' or unhandled argument '$deplib'" -+ if test "$found" = yes || test -f "$lib"; then : -+ else -+ func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'" -+ fi - - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$lib" \ -- || func_fatal_error "'$lib' is not a valid libtool archive" -+ || func_fatal_error "\`$lib' is not a valid libtool archive" - - func_dirname "$lib" "" "." -- ladir=$func_dirname_result -+ ladir="$func_dirname_result" - - dlname= - dlopen= -@@ -7870,30 +6427,30 @@ - done - fi - dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` -- if test lib,link = "$linkmode,$pass" || -- test prog,scan = "$linkmode,$pass" || -- { test prog != "$linkmode" && test lib != "$linkmode"; }; then -+ if test "$linkmode,$pass" = "lib,link" || -+ test "$linkmode,$pass" = "prog,scan" || -+ { test "$linkmode" != prog && test "$linkmode" != lib; }; then - test -n "$dlopen" && func_append dlfiles " $dlopen" - test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen" - fi - -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - # Only check for convenience libraries - deplibs="$lib $deplibs" - if test -z "$libdir"; then - if test -z "$old_library"; then -- func_fatal_error "cannot find name of link library for '$lib'" -+ func_fatal_error "cannot find name of link library for \`$lib'" - fi - # It is a libtool convenience library, so add in its objects. 
- func_append convenience " $ladir/$objdir/$old_library" - func_append old_convenience " $ladir/$objdir/$old_library" -- elif test prog != "$linkmode" && test lib != "$linkmode"; then -- func_fatal_error "'$lib' is not a convenience library" -+ elif test "$linkmode" != prog && test "$linkmode" != lib; then -+ func_fatal_error "\`$lib' is not a convenience library" - fi - tmp_libs= - for deplib in $dependency_libs; do - deplibs="$deplib $deplibs" -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in - *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac -@@ -7907,26 +6464,26 @@ - # Get the name of the library we link against. - linklib= - if test -n "$old_library" && -- { test yes = "$prefer_static_libs" || -- test built,no = "$prefer_static_libs,$installed"; }; then -+ { test "$prefer_static_libs" = yes || -+ test "$prefer_static_libs,$installed" = "built,no"; }; then - linklib=$old_library - else - for l in $old_library $library_names; do -- linklib=$l -+ linklib="$l" - done - fi - if test -z "$linklib"; then -- func_fatal_error "cannot find name of link library for '$lib'" -+ func_fatal_error "cannot find name of link library for \`$lib'" - fi - - # This library was specified with -dlopen. -- if test dlopen = "$pass"; then -- test -z "$libdir" \ -- && func_fatal_error "cannot -dlopen a convenience library: '$lib'" -+ if test "$pass" = dlopen; then -+ if test -z "$libdir"; then -+ func_fatal_error "cannot -dlopen a convenience library: \`$lib'" -+ fi - if test -z "$dlname" || -- test yes != "$dlopen_support" || -- test no = "$build_libtool_libs" -- then -+ test "$dlopen_support" != yes || -+ test "$build_libtool_libs" = no; then - # If there is no dlname, no dlopen support or we're linking - # statically, we need to preload. We also need to preload any - # dependent libraries so libltdl's deplib preloader doesn't -@@ -7940,40 +6497,40 @@ - - # We need an absolute path. - case $ladir in -- [\\/]* | [A-Za-z]:[\\/]*) abs_ladir=$ladir ;; -+ [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; - *) - abs_ladir=`cd "$ladir" && pwd` - if test -z "$abs_ladir"; then -- func_warning "cannot determine absolute directory name of '$ladir'" -+ func_warning "cannot determine absolute directory name of \`$ladir'" - func_warning "passing it literally to the linker, although it might fail" -- abs_ladir=$ladir -+ abs_ladir="$ladir" - fi - ;; - esac - func_basename "$lib" -- laname=$func_basename_result -+ laname="$func_basename_result" - - # Find the relevant object directory and library name. -- if test yes = "$installed"; then -+ if test "X$installed" = Xyes; then - if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then -- func_warning "library '$lib' was moved." -- dir=$ladir -- absdir=$abs_ladir -- libdir=$abs_ladir -+ func_warning "library \`$lib' was moved." -+ dir="$ladir" -+ absdir="$abs_ladir" -+ libdir="$abs_ladir" - else -- dir=$lt_sysroot$libdir -- absdir=$lt_sysroot$libdir -+ dir="$lt_sysroot$libdir" -+ absdir="$lt_sysroot$libdir" - fi -- test yes = "$hardcode_automatic" && avoidtemprpath=yes -+ test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes - else - if test ! 
-f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then -- dir=$ladir -- absdir=$abs_ladir -+ dir="$ladir" -+ absdir="$abs_ladir" - # Remove this search path later - func_append notinst_path " $abs_ladir" - else -- dir=$ladir/$objdir -- absdir=$abs_ladir/$objdir -+ dir="$ladir/$objdir" -+ absdir="$abs_ladir/$objdir" - # Remove this search path later - func_append notinst_path " $abs_ladir" - fi -@@ -7982,11 +6539,11 @@ - name=$func_stripname_result - - # This library was specified with -dlpreopen. -- if test dlpreopen = "$pass"; then -- if test -z "$libdir" && test prog = "$linkmode"; then -- func_fatal_error "only libraries may -dlpreopen a convenience library: '$lib'" -+ if test "$pass" = dlpreopen; then -+ if test -z "$libdir" && test "$linkmode" = prog; then -+ func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" - fi -- case $host in -+ case "$host" in - # special handling for platforms with PE-DLLs. - *cygwin* | *mingw* | *cegcc* ) - # Linker will automatically link against shared library if both -@@ -8030,9 +6587,9 @@ - - if test -z "$libdir"; then - # Link the convenience library -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - deplibs="$dir/$old_library $deplibs" -- elif test prog,link = "$linkmode,$pass"; then -+ elif test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$dir/$old_library $compile_deplibs" - finalize_deplibs="$dir/$old_library $finalize_deplibs" - else -@@ -8042,14 +6599,14 @@ - fi - - -- if test prog = "$linkmode" && test link != "$pass"; then -+ if test "$linkmode" = prog && test "$pass" != link; then - func_append newlib_search_path " $ladir" - deplibs="$lib $deplibs" - -- linkalldeplibs=false -- if test no != "$link_all_deplibs" || test -z "$library_names" || -- test no = "$build_libtool_libs"; then -- linkalldeplibs=: -+ linkalldeplibs=no -+ if test "$link_all_deplibs" != no || test -z "$library_names" || -+ test "$build_libtool_libs" = no; then -+ linkalldeplibs=yes - fi - - tmp_libs= -@@ -8061,14 +6618,14 @@ - ;; - esac - # Need to link against all dependency_libs? -- if $linkalldeplibs; then -+ if test "$linkalldeplibs" = yes; then - deplibs="$deplib $deplibs" - else - # Need to hardcode shared library paths - # or/and link against static libraries - newdependency_libs="$deplib $newdependency_libs" - fi -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in - *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac -@@ -8078,15 +6635,15 @@ - continue - fi # $linkmode = prog... - -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - if test -n "$library_names" && -- { { test no = "$prefer_static_libs" || -- test built,yes = "$prefer_static_libs,$installed"; } || -+ { { test "$prefer_static_libs" = no || -+ test "$prefer_static_libs,$installed" = "built,yes"; } || - test -z "$old_library"; }; then - # We need to hardcode the library path -- if test -n "$shlibpath_var" && test -z "$avoidtemprpath"; then -+ if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then - # Make sure the rpath contains only unique directories. -- case $temp_rpath: in -+ case "$temp_rpath:" in - *"$absdir:"*) ;; - *) func_append temp_rpath "$absdir:" ;; - esac -@@ -8115,9 +6672,9 @@ - esac - fi # $linkmode,$pass = prog,link... 
- -- if $alldeplibs && -- { test pass_all = "$deplibs_check_method" || -- { test yes = "$build_libtool_libs" && -+ if test "$alldeplibs" = yes && -+ { test "$deplibs_check_method" = pass_all || -+ { test "$build_libtool_libs" = yes && - test -n "$library_names"; }; }; then - # We only need to search for static libraries - continue -@@ -8126,19 +6683,19 @@ - - link_static=no # Whether the deplib will be linked statically - use_static_libs=$prefer_static_libs -- if test built = "$use_static_libs" && test yes = "$installed"; then -+ if test "$use_static_libs" = built && test "$installed" = yes; then - use_static_libs=no - fi - if test -n "$library_names" && -- { test no = "$use_static_libs" || test -z "$old_library"; }; then -+ { test "$use_static_libs" = no || test -z "$old_library"; }; then - case $host in -- *cygwin* | *mingw* | *cegcc* | *os2*) -+ *cygwin* | *mingw* | *cegcc*) - # No point in relinking DLLs because paths are not encoded - func_append notinst_deplibs " $lib" - need_relink=no - ;; - *) -- if test no = "$installed"; then -+ if test "$installed" = no; then - func_append notinst_deplibs " $lib" - need_relink=yes - fi -@@ -8148,24 +6705,24 @@ - - # Warn about portability, can't link against -module's on some - # systems (darwin). Don't bleat about dlopened modules though! -- dlopenmodule= -+ dlopenmodule="" - for dlpremoduletest in $dlprefiles; do - if test "X$dlpremoduletest" = "X$lib"; then -- dlopenmodule=$dlpremoduletest -+ dlopenmodule="$dlpremoduletest" - break - fi - done -- if test -z "$dlopenmodule" && test yes = "$shouldnotlink" && test link = "$pass"; then -+ if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then - echo -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - $ECHO "*** Warning: Linking the executable $output against the loadable module" - else - $ECHO "*** Warning: Linking the shared library $output against the loadable module" - fi - $ECHO "*** $linklib is not portable!" - fi -- if test lib = "$linkmode" && -- test yes = "$hardcode_into_libs"; then -+ if test "$linkmode" = lib && -+ test "$hardcode_into_libs" = yes; then - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. -@@ -8193,43 +6750,43 @@ - # figure out the soname - set dummy $library_names - shift -- realname=$1 -+ realname="$1" - shift - libname=`eval "\\$ECHO \"$libname_spec\""` - # use dlname if we got it. it's perfectly good, no? - if test -n "$dlname"; then -- soname=$dlname -+ soname="$dlname" - elif test -n "$soname_spec"; then - # bleh windows - case $host in -- *cygwin* | mingw* | *cegcc* | *os2*) -+ *cygwin* | mingw* | *cegcc*) - func_arith $current - $age - major=$func_arith_result -- versuffix=-$major -+ versuffix="-$major" - ;; - esac - eval soname=\"$soname_spec\" - else -- soname=$realname -+ soname="$realname" - fi - - # Make a new name for the extract_expsyms_cmds to use -- soroot=$soname -+ soroot="$soname" - func_basename "$soroot" -- soname=$func_basename_result -+ soname="$func_basename_result" - func_stripname 'lib' '.dll' "$soname" - newlib=libimp-$func_stripname_result.a - - # If the library has no export list, then create one now - if test -f "$output_objdir/$soname-def"; then : - else -- func_verbose "extracting exported symbol list from '$soname'" -+ func_verbose "extracting exported symbol list from \`$soname'" - func_execute_cmds "$extract_expsyms_cmds" 'exit $?' 
- fi - - # Create $newlib - if test -f "$output_objdir/$newlib"; then :; else -- func_verbose "generating import library for '$soname'" -+ func_verbose "generating import library for \`$soname'" - func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?' - fi - # make sure the library variables are pointing to the new library -@@ -8237,58 +6794,58 @@ - linklib=$newlib - fi # test -n "$old_archive_from_expsyms_cmds" - -- if test prog = "$linkmode" || test relink != "$opt_mode"; then -+ if test "$linkmode" = prog || test "$opt_mode" != relink; then - add_shlibpath= - add_dir= - add= - lib_linked=yes - case $hardcode_action in - immediate | unsupported) -- if test no = "$hardcode_direct"; then -- add=$dir/$linklib -+ if test "$hardcode_direct" = no; then -+ add="$dir/$linklib" - case $host in -- *-*-sco3.2v5.0.[024]*) add_dir=-L$dir ;; -- *-*-sysv4*uw2*) add_dir=-L$dir ;; -+ *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; -+ *-*-sysv4*uw2*) add_dir="-L$dir" ;; - *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ -- *-*-unixware7*) add_dir=-L$dir ;; -+ *-*-unixware7*) add_dir="-L$dir" ;; - *-*-darwin* ) -- # if the lib is a (non-dlopened) module then we cannot -+ # if the lib is a (non-dlopened) module then we can not - # link against it, someone is ignoring the earlier warnings - if /usr/bin/file -L $add 2> /dev/null | -- $GREP ": [^:]* bundle" >/dev/null; then -+ $GREP ": [^:]* bundle" >/dev/null ; then - if test "X$dlopenmodule" != "X$lib"; then - $ECHO "*** Warning: lib $linklib is a module, not a shared library" -- if test -z "$old_library"; then -+ if test -z "$old_library" ; then - echo - echo "*** And there doesn't seem to be a static archive available" - echo "*** The link will probably fail, sorry" - else -- add=$dir/$old_library -+ add="$dir/$old_library" - fi - elif test -n "$old_library"; then -- add=$dir/$old_library -+ add="$dir/$old_library" - fi - fi - esac -- elif test no = "$hardcode_minus_L"; then -+ elif test "$hardcode_minus_L" = no; then - case $host in -- *-*-sunos*) add_shlibpath=$dir ;; -+ *-*-sunos*) add_shlibpath="$dir" ;; - esac -- add_dir=-L$dir -- add=-l$name -- elif test no = "$hardcode_shlibpath_var"; then -- add_shlibpath=$dir -- add=-l$name -+ add_dir="-L$dir" -+ add="-l$name" -+ elif test "$hardcode_shlibpath_var" = no; then -+ add_shlibpath="$dir" -+ add="-l$name" - else - lib_linked=no - fi - ;; - relink) -- if test yes = "$hardcode_direct" && -- test no = "$hardcode_direct_absolute"; then -- add=$dir/$linklib -- elif test yes = "$hardcode_minus_L"; then -- add_dir=-L$absdir -+ if test "$hardcode_direct" = yes && -+ test "$hardcode_direct_absolute" = no; then -+ add="$dir/$linklib" -+ elif test "$hardcode_minus_L" = yes; then -+ add_dir="-L$absdir" - # Try looking first in the location we're being installed to. 
- if test -n "$inst_prefix_dir"; then - case $libdir in -@@ -8297,10 +6854,10 @@ - ;; - esac - fi -- add=-l$name -- elif test yes = "$hardcode_shlibpath_var"; then -- add_shlibpath=$dir -- add=-l$name -+ add="-l$name" -+ elif test "$hardcode_shlibpath_var" = yes; then -+ add_shlibpath="$dir" -+ add="-l$name" - else - lib_linked=no - fi -@@ -8308,7 +6865,7 @@ - *) lib_linked=no ;; - esac - -- if test yes != "$lib_linked"; then -+ if test "$lib_linked" != yes; then - func_fatal_configuration "unsupported hardcode properties" - fi - -@@ -8318,15 +6875,15 @@ - *) func_append compile_shlibpath "$add_shlibpath:" ;; - esac - fi -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" - test -n "$add" && compile_deplibs="$add $compile_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" -- if test yes != "$hardcode_direct" && -- test yes != "$hardcode_minus_L" && -- test yes = "$hardcode_shlibpath_var"; then -+ if test "$hardcode_direct" != yes && -+ test "$hardcode_minus_L" != yes && -+ test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) func_append finalize_shlibpath "$libdir:" ;; -@@ -8335,33 +6892,33 @@ - fi - fi - -- if test prog = "$linkmode" || test relink = "$opt_mode"; then -+ if test "$linkmode" = prog || test "$opt_mode" = relink; then - add_shlibpath= - add_dir= - add= - # Finalize command for both is simple: just hardcode it. -- if test yes = "$hardcode_direct" && -- test no = "$hardcode_direct_absolute"; then -- add=$libdir/$linklib -- elif test yes = "$hardcode_minus_L"; then -- add_dir=-L$libdir -- add=-l$name -- elif test yes = "$hardcode_shlibpath_var"; then -+ if test "$hardcode_direct" = yes && -+ test "$hardcode_direct_absolute" = no; then -+ add="$libdir/$linklib" -+ elif test "$hardcode_minus_L" = yes; then -+ add_dir="-L$libdir" -+ add="-l$name" -+ elif test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) func_append finalize_shlibpath "$libdir:" ;; - esac -- add=-l$name -- elif test yes = "$hardcode_automatic"; then -+ add="-l$name" -+ elif test "$hardcode_automatic" = yes; then - if test -n "$inst_prefix_dir" && -- test -f "$inst_prefix_dir$libdir/$linklib"; then -- add=$inst_prefix_dir$libdir/$linklib -+ test -f "$inst_prefix_dir$libdir/$linklib" ; then -+ add="$inst_prefix_dir$libdir/$linklib" - else -- add=$libdir/$linklib -+ add="$libdir/$linklib" - fi - else - # We cannot seem to hardcode it, guess we'll fake it. -- add_dir=-L$libdir -+ add_dir="-L$libdir" - # Try looking first in the location we're being installed to. - if test -n "$inst_prefix_dir"; then - case $libdir in -@@ -8370,10 +6927,10 @@ - ;; - esac - fi -- add=-l$name -+ add="-l$name" - fi - -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" - test -n "$add" && finalize_deplibs="$add $finalize_deplibs" - else -@@ -8381,43 +6938,43 @@ - test -n "$add" && deplibs="$add $deplibs" - fi - fi -- elif test prog = "$linkmode"; then -+ elif test "$linkmode" = prog; then - # Here we assume that one of hardcode_direct or hardcode_minus_L - # is not unsupported. This is valid on all known static and - # shared platforms. 
-- if test unsupported != "$hardcode_direct"; then -- test -n "$old_library" && linklib=$old_library -+ if test "$hardcode_direct" != unsupported; then -+ test -n "$old_library" && linklib="$old_library" - compile_deplibs="$dir/$linklib $compile_deplibs" - finalize_deplibs="$dir/$linklib $finalize_deplibs" - else - compile_deplibs="-l$name -L$dir $compile_deplibs" - finalize_deplibs="-l$name -L$dir $finalize_deplibs" - fi -- elif test yes = "$build_libtool_libs"; then -+ elif test "$build_libtool_libs" = yes; then - # Not a shared library -- if test pass_all != "$deplibs_check_method"; then -+ if test "$deplibs_check_method" != pass_all; then - # We're trying link a shared library against a static one - # but the system doesn't support it. - - # Just print a warning and add the library to dependency_libs so - # that the program can be linked against the static library. - echo -- $ECHO "*** Warning: This system cannot link to static lib archive $lib." -+ $ECHO "*** Warning: This system can not link to static lib archive $lib." - echo "*** I have the capability to make that library automatically link in when" - echo "*** you link to this library. But I can only do this if you have a" - echo "*** shared version of the library, which you do not appear to have." -- if test yes = "$module"; then -+ if test "$module" = yes; then - echo "*** But as you try to build a module library, libtool will still create " - echo "*** a static module, that should work as long as the dlopening application" - echo "*** is linked with the -dlopen flag to resolve symbols at runtime." - if test -z "$global_symbol_pipe"; then - echo - echo "*** However, this would only work if libtool was able to extract symbol" -- echo "*** lists from a program, using 'nm' or equivalent, but libtool could" -+ echo "*** lists from a program, using \`nm' or equivalent, but libtool could" - echo "*** not find such a program. So, this module is probably useless." -- echo "*** 'nm' from GNU binutils and a full rebuild may help." -+ echo "*** \`nm' from GNU binutils and a full rebuild may help." - fi -- if test no = "$build_old_libs"; then -+ if test "$build_old_libs" = no; then - build_libtool_libs=module - build_old_libs=yes - else -@@ -8430,11 +6987,11 @@ - fi - fi # link shared/static library? - -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - if test -n "$dependency_libs" && -- { test yes != "$hardcode_into_libs" || -- test yes = "$build_old_libs" || -- test yes = "$link_static"; }; then -+ { test "$hardcode_into_libs" != yes || -+ test "$build_old_libs" = yes || -+ test "$link_static" = yes; }; then - # Extract -R from dependency_libs - temp_deplibs= - for libdir in $dependency_libs; do -@@ -8448,12 +7005,12 @@ - *) func_append temp_deplibs " $libdir";; - esac - done -- dependency_libs=$temp_deplibs -+ dependency_libs="$temp_deplibs" - fi - - func_append newlib_search_path " $absdir" - # Link against this library -- test no = "$link_static" && newdependency_libs="$abs_ladir/$laname $newdependency_libs" -+ test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" - # ... 
and its dependency_libs - tmp_libs= - for deplib in $dependency_libs; do -@@ -8463,7 +7020,7 @@ - func_resolve_sysroot "$func_stripname_result";; - *) func_resolve_sysroot "$deplib" ;; - esac -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in - *" $func_resolve_sysroot_result "*) - func_append specialdeplibs " $func_resolve_sysroot_result" ;; -@@ -8472,12 +7029,12 @@ - func_append tmp_libs " $func_resolve_sysroot_result" - done - -- if test no != "$link_all_deplibs"; then -+ if test "$link_all_deplibs" != no; then - # Add the search paths of all dependency libraries - for deplib in $dependency_libs; do - path= - case $deplib in -- -L*) path=$deplib ;; -+ -L*) path="$deplib" ;; - *.la) - func_resolve_sysroot "$deplib" - deplib=$func_resolve_sysroot_result -@@ -8485,12 +7042,12 @@ - dir=$func_dirname_result - # We need an absolute path. - case $dir in -- [\\/]* | [A-Za-z]:[\\/]*) absdir=$dir ;; -+ [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; - *) - absdir=`cd "$dir" && pwd` - if test -z "$absdir"; then -- func_warning "cannot determine absolute directory name of '$dir'" -- absdir=$dir -+ func_warning "cannot determine absolute directory name of \`$dir'" -+ absdir="$dir" - fi - ;; - esac -@@ -8498,35 +7055,35 @@ - case $host in - *-*-darwin*) - depdepl= -- eval deplibrary_names=`$SED -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` -- if test -n "$deplibrary_names"; then -- for tmp in $deplibrary_names; do -+ eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` -+ if test -n "$deplibrary_names" ; then -+ for tmp in $deplibrary_names ; do - depdepl=$tmp - done -- if test -f "$absdir/$objdir/$depdepl"; then -- depdepl=$absdir/$objdir/$depdepl -- darwin_install_name=`$OTOOL -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` -+ if test -f "$absdir/$objdir/$depdepl" ; then -+ depdepl="$absdir/$objdir/$depdepl" -+ darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` - if test -z "$darwin_install_name"; then -- darwin_install_name=`$OTOOL64 -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` -+ darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` - fi -- func_append compiler_flags " $wl-dylib_file $wl$darwin_install_name:$depdepl" -- func_append linker_flags " -dylib_file $darwin_install_name:$depdepl" -+ func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" -+ func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}" - path= - fi - fi - ;; - *) -- path=-L$absdir/$objdir -+ path="-L$absdir/$objdir" - ;; - esac - else -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - test -z "$libdir" && \ -- func_fatal_error "'$deplib' is not a valid libtool archive" -+ func_fatal_error "\`$deplib' is not a valid libtool archive" - test "$absdir" != "$libdir" && \ -- func_warning "'$deplib' seems to be moved" -+ func_warning "\`$deplib' seems to be moved" - -- path=-L$absdir -+ path="-L$absdir" - fi - ;; - esac -@@ -8538,23 +7095,23 @@ - fi # link_all_deplibs != no - fi # linkmode = lib - done # for deplib in $libs -- if test link = "$pass"; then -- if test prog = "$linkmode"; then -+ if test "$pass" = link; then -+ if test "$linkmode" = "prog"; then - compile_deplibs="$new_inherited_linker_flags $compile_deplibs" - finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs" - else - compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% 
\([^ $]*\).ltframework% -framework \1%g'` - fi - fi -- dependency_libs=$newdependency_libs -- if test dlpreopen = "$pass"; then -+ dependency_libs="$newdependency_libs" -+ if test "$pass" = dlpreopen; then - # Link the dlpreopened libraries before other libraries - for deplib in $save_deplibs; do - deplibs="$deplib $deplibs" - done - fi -- if test dlopen != "$pass"; then -- test conv = "$pass" || { -+ if test "$pass" != dlopen; then -+ if test "$pass" != conv; then - # Make sure lib_search_path contains only unique directories. - lib_search_path= - for dir in $newlib_search_path; do -@@ -8564,12 +7121,12 @@ - esac - done - newlib_search_path= -- } -+ fi - -- if test prog,link = "$linkmode,$pass"; then -- vars="compile_deplibs finalize_deplibs" -+ if test "$linkmode,$pass" != "prog,link"; then -+ vars="deplibs" - else -- vars=deplibs -+ vars="compile_deplibs finalize_deplibs" - fi - for var in $vars dependency_libs; do - # Add libraries to $var in reverse order -@@ -8627,93 +7184,62 @@ - eval $var=\"$tmp_libs\" - done # for var - fi -- -- # Add Sun CC postdeps if required: -- test CXX = "$tagname" && { -- case $host_os in -- linux*) -- case `$CC -V 2>&1 | sed 5q` in -- *Sun\ C*) # Sun C++ 5.9 -- func_suncc_cstd_abi -- -- if test no != "$suncc_use_cstd_abi"; then -- func_append postdeps ' -library=Cstd -library=Crun' -- fi -- ;; -- esac -- ;; -- -- solaris*) -- func_cc_basename "$CC" -- case $func_cc_basename_result in -- CC* | sunCC*) -- func_suncc_cstd_abi -- -- if test no != "$suncc_use_cstd_abi"; then -- func_append postdeps ' -library=Cstd -library=Crun' -- fi -- ;; -- esac -- ;; -- esac -- } -- - # Last step: remove runtime libs from dependency_libs - # (they stay in deplibs) - tmp_libs= -- for i in $dependency_libs; do -+ for i in $dependency_libs ; do - case " $predeps $postdeps $compiler_lib_search_path " in - *" $i "*) -- i= -+ i="" - ;; - esac -- if test -n "$i"; then -+ if test -n "$i" ; then - func_append tmp_libs " $i" - fi - done - dependency_libs=$tmp_libs - done # for pass -- if test prog = "$linkmode"; then -- dlfiles=$newdlfiles -+ if test "$linkmode" = prog; then -+ dlfiles="$newdlfiles" - fi -- if test prog = "$linkmode" || test lib = "$linkmode"; then -- dlprefiles=$newdlprefiles -+ if test "$linkmode" = prog || test "$linkmode" = lib; then -+ dlprefiles="$newdlprefiles" - fi - - case $linkmode in - oldlib) -- if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then -- func_warning "'-dlopen' is ignored for archives" -+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then -+ func_warning "\`-dlopen' is ignored for archives" - fi - - case " $deplibs" in - *\ -l* | *\ -L*) -- func_warning "'-l' and '-L' are ignored for archives" ;; -+ func_warning "\`-l' and \`-L' are ignored for archives" ;; - esac - - test -n "$rpath" && \ -- func_warning "'-rpath' is ignored for archives" -+ func_warning "\`-rpath' is ignored for archives" - - test -n "$xrpath" && \ -- func_warning "'-R' is ignored for archives" -+ func_warning "\`-R' is ignored for archives" - - test -n "$vinfo" && \ -- func_warning "'-version-info/-version-number' is ignored for archives" -+ func_warning "\`-version-info/-version-number' is ignored for archives" - - test -n "$release" && \ -- func_warning "'-release' is ignored for archives" -+ func_warning "\`-release' is ignored for archives" - - test -n "$export_symbols$export_symbols_regex" && \ -- func_warning "'-export-symbols' is ignored for archives" -+ func_warning "\`-export-symbols' is ignored for archives" - - # Now set the variables for 
building old libraries. - build_libtool_libs=no -- oldlibs=$output -+ oldlibs="$output" - func_append objs "$old_deplibs" - ;; - - lib) -- # Make sure we only generate libraries of the form 'libNAME.la'. -+ # Make sure we only generate libraries of the form `libNAME.la'. - case $outputname in - lib*) - func_stripname 'lib' '.la' "$outputname" -@@ -8722,10 +7248,10 @@ - eval libname=\"$libname_spec\" - ;; - *) -- test no = "$module" \ -- && func_fatal_help "libtool library '$output' must begin with 'lib'" -+ test "$module" = no && \ -+ func_fatal_help "libtool library \`$output' must begin with \`lib'" - -- if test no != "$need_lib_prefix"; then -+ if test "$need_lib_prefix" != no; then - # Add the "lib" prefix for modules if required - func_stripname '' '.la' "$outputname" - name=$func_stripname_result -@@ -8739,8 +7265,8 @@ - esac - - if test -n "$objs"; then -- if test pass_all != "$deplibs_check_method"; then -- func_fatal_error "cannot build libtool library '$output' from non-libtool objects on this host:$objs" -+ if test "$deplibs_check_method" != pass_all; then -+ func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs" - else - echo - $ECHO "*** Warning: Linking the shared library $output against the non-libtool" -@@ -8749,21 +7275,21 @@ - fi - fi - -- test no = "$dlself" \ -- || func_warning "'-dlopen self' is ignored for libtool libraries" -+ test "$dlself" != no && \ -+ func_warning "\`-dlopen self' is ignored for libtool libraries" - - set dummy $rpath - shift -- test 1 -lt "$#" \ -- && func_warning "ignoring multiple '-rpath's for a libtool library" -+ test "$#" -gt 1 && \ -+ func_warning "ignoring multiple \`-rpath's for a libtool library" - -- install_libdir=$1 -+ install_libdir="$1" - - oldlibs= - if test -z "$rpath"; then -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - # Building a libtool convenience library. -- # Some compilers have problems with a '.al' extension so -+ # Some compilers have problems with a `.al' extension so - # convenience libraries should have the same extension an - # archive normally would. - oldlibs="$output_objdir/$libname.$libext $oldlibs" -@@ -8772,20 +7298,20 @@ - fi - - test -n "$vinfo" && \ -- func_warning "'-version-info/-version-number' is ignored for convenience libraries" -+ func_warning "\`-version-info/-version-number' is ignored for convenience libraries" - - test -n "$release" && \ -- func_warning "'-release' is ignored for convenience libraries" -+ func_warning "\`-release' is ignored for convenience libraries" - else - - # Parse the version information argument. -- save_ifs=$IFS; IFS=: -+ save_ifs="$IFS"; IFS=':' - set dummy $vinfo 0 0 0 - shift -- IFS=$save_ifs -+ IFS="$save_ifs" - - test -n "$7" && \ -- func_fatal_help "too many parameters to '-version-info'" -+ func_fatal_help "too many parameters to \`-version-info'" - - # convert absolute version numbers to libtool ages - # this retains compatibility with .la files and attempts -@@ -8793,42 +7319,42 @@ - - case $vinfo_number in - yes) -- number_major=$1 -- number_minor=$2 -- number_revision=$3 -+ number_major="$1" -+ number_minor="$2" -+ number_revision="$3" - # - # There are really only two kinds -- those that - # use the current revision as the major version - # and those that subtract age and use age as - # a minor version. 
But, then there is irix -- # that has an extra 1 added just for fun -+ # which has an extra 1 added just for fun - # - case $version_type in - # correct linux to gnu/linux during the next big refactor -- darwin|freebsd-elf|linux|osf|windows|none) -+ darwin|linux|osf|windows|none) - func_arith $number_major + $number_minor - current=$func_arith_result -- age=$number_minor -- revision=$number_revision -+ age="$number_minor" -+ revision="$number_revision" - ;; -- freebsd-aout|qnx|sunos) -- current=$number_major -- revision=$number_minor -- age=0 -+ freebsd-aout|freebsd-elf|qnx|sunos) -+ current="$number_major" -+ revision="$number_minor" -+ age="0" - ;; - irix|nonstopux) - func_arith $number_major + $number_minor - current=$func_arith_result -- age=$number_minor -- revision=$number_minor -+ age="$number_minor" -+ revision="$number_minor" - lt_irix_increment=no - ;; - esac - ;; - no) -- current=$1 -- revision=$2 -- age=$3 -+ current="$1" -+ revision="$2" -+ age="$3" - ;; - esac - -@@ -8836,30 +7362,30 @@ - case $current in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) -- func_error "CURRENT '$current' must be a nonnegative integer" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "CURRENT \`$current' must be a nonnegative integer" -+ func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - case $revision in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) -- func_error "REVISION '$revision' must be a nonnegative integer" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "REVISION \`$revision' must be a nonnegative integer" -+ func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - case $age in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) -- func_error "AGE '$age' must be a nonnegative integer" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "AGE \`$age' must be a nonnegative integer" -+ func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - if test "$age" -gt "$current"; then -- func_error "AGE '$age' is greater than the current interface number '$current'" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "AGE \`$age' is greater than the current interface number \`$current'" -+ func_fatal_error "\`$vinfo' is not valid version information" - fi - - # Calculate the version variables. -@@ -8874,36 +7400,26 @@ - # verstring for coding it into the library header - func_arith $current - $age - major=.$func_arith_result -- versuffix=$major.$age.$revision -+ versuffix="$major.$age.$revision" - # Darwin ld doesn't like 0 for these options... 
- func_arith $current + 1 - minor_current=$func_arith_result -- xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" -+ xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision" - verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" -- # On Darwin other compilers -- case $CC in -- nagfor*) -- verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" -- ;; -- *) -- verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" -- ;; -- esac - ;; - - freebsd-aout) -- major=.$current -- versuffix=.$current.$revision -+ major=".$current" -+ versuffix=".$current.$revision"; - ;; - - freebsd-elf) -- func_arith $current - $age -- major=.$func_arith_result -- versuffix=$major.$age.$revision -+ major=".$current" -+ versuffix=".$current" - ;; - - irix | nonstopux) -- if test no = "$lt_irix_increment"; then -+ if test "X$lt_irix_increment" = "Xno"; then - func_arith $current - $age - else - func_arith $current - $age + 1 -@@ -8914,74 +7430,69 @@ - nonstopux) verstring_prefix=nonstopux ;; - *) verstring_prefix=sgi ;; - esac -- verstring=$verstring_prefix$major.$revision -+ verstring="$verstring_prefix$major.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$revision -- while test 0 -ne "$loop"; do -+ while test "$loop" -ne 0; do - func_arith $revision - $loop - iface=$func_arith_result - func_arith $loop - 1 - loop=$func_arith_result -- verstring=$verstring_prefix$major.$iface:$verstring -+ verstring="$verstring_prefix$major.$iface:$verstring" - done - -- # Before this point, $major must not contain '.'. -+ # Before this point, $major must not contain `.'. - major=.$major -- versuffix=$major.$revision -+ versuffix="$major.$revision" - ;; - - linux) # correct to gnu/linux during the next big refactor - func_arith $current - $age - major=.$func_arith_result -- versuffix=$major.$age.$revision -+ versuffix="$major.$age.$revision" - ;; - - osf) - func_arith $current - $age - major=.$func_arith_result -- versuffix=.$current.$age.$revision -- verstring=$current.$age.$revision -+ versuffix=".$current.$age.$revision" -+ verstring="$current.$age.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$age -- while test 0 -ne "$loop"; do -+ while test "$loop" -ne 0; do - func_arith $current - $loop - iface=$func_arith_result - func_arith $loop - 1 - loop=$func_arith_result -- verstring=$verstring:$iface.0 -+ verstring="$verstring:${iface}.0" - done - - # Make executables depend on our current version. -- func_append verstring ":$current.0" -+ func_append verstring ":${current}.0" - ;; - - qnx) -- major=.$current -- versuffix=.$current -- ;; -- -- sco) -- major=.$current -- versuffix=.$current -+ major=".$current" -+ versuffix=".$current" - ;; - - sunos) -- major=.$current -- versuffix=.$current.$revision -+ major=".$current" -+ versuffix=".$current.$revision" - ;; - - windows) - # Use '-' rather than '.', since we only want one -- # extension on DOS 8.3 file systems. -+ # extension on DOS 8.3 filesystems. 
- func_arith $current - $age - major=$func_arith_result -- versuffix=-$major -+ versuffix="-$major" - ;; - - *) -- func_fatal_configuration "unknown library version type '$version_type'" -+ func_fatal_configuration "unknown library version type \`$version_type'" - ;; - esac - -@@ -8995,45 +7506,42 @@ - verstring= - ;; - *) -- verstring=0.0 -+ verstring="0.0" - ;; - esac -- if test no = "$need_version"; then -+ if test "$need_version" = no; then - versuffix= - else -- versuffix=.0.0 -+ versuffix=".0.0" - fi - fi - - # Remove version info from name if versioning should be avoided -- if test yes,no = "$avoid_version,$need_version"; then -+ if test "$avoid_version" = yes && test "$need_version" = no; then - major= - versuffix= -- verstring= -+ verstring="" - fi - - # Check to see if the archive will have undefined symbols. -- if test yes = "$allow_undefined"; then -- if test unsupported = "$allow_undefined_flag"; then -- if test yes = "$build_old_libs"; then -- func_warning "undefined symbols not allowed in $host shared libraries; building static only" -- build_libtool_libs=no -- else -- func_fatal_error "can't build $host shared library unless -no-undefined is specified" -- fi -+ if test "$allow_undefined" = yes; then -+ if test "$allow_undefined_flag" = unsupported; then -+ func_warning "undefined symbols not allowed in $host shared libraries" -+ build_libtool_libs=no -+ build_old_libs=yes - fi - else - # Don't allow undefined symbols. -- allow_undefined_flag=$no_undefined_flag -+ allow_undefined_flag="$no_undefined_flag" - fi - - fi - -- func_generate_dlsyms "$libname" "$libname" : -+ func_generate_dlsyms "$libname" "$libname" "yes" - func_append libobjs " $symfileobj" -- test " " = "$libobjs" && libobjs= -+ test "X$libobjs" = "X " && libobjs= - -- if test relink != "$opt_mode"; then -+ if test "$opt_mode" != relink; then - # Remove our outputs, but don't remove object files since they - # may have been created when compiling PIC objects. - removelist= -@@ -9042,8 +7550,8 @@ - case $p in - *.$objext | *.gcno) - ;; -- $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/$libname$release.*) -- if test -n "$precious_files_regex"; then -+ $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) -+ if test "X$precious_files_regex" != "X"; then - if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 - then - continue -@@ -9059,11 +7567,11 @@ - fi - - # Now set the variables for building old libraries. -- if test yes = "$build_old_libs" && test convenience != "$build_libtool_libs"; then -+ if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then - func_append oldlibs " $output_objdir/$libname.$libext" - - # Transform .lo files to .o files. -- oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; $lo2o" | $NL2SP` -+ oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP` - fi - - # Eliminate all temporary directories. 
-@@ -9084,13 +7592,13 @@ - *) func_append finalize_rpath " $libdir" ;; - esac - done -- if test yes != "$hardcode_into_libs" || test yes = "$build_old_libs"; then -+ if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then - dependency_libs="$temp_xrpath $dependency_libs" - fi - fi - - # Make sure dlfiles contains only unique files that won't be dlpreopened -- old_dlfiles=$dlfiles -+ old_dlfiles="$dlfiles" - dlfiles= - for lib in $old_dlfiles; do - case " $dlprefiles $dlfiles " in -@@ -9100,7 +7608,7 @@ - done - - # Make sure dlprefiles contains only unique files -- old_dlprefiles=$dlprefiles -+ old_dlprefiles="$dlprefiles" - dlprefiles= - for lib in $old_dlprefiles; do - case "$dlprefiles " in -@@ -9109,7 +7617,7 @@ - esac - done - -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - if test -n "$rpath"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*) -@@ -9133,7 +7641,7 @@ - ;; - *) - # Add libc to deplibs on all other systems if necessary. -- if test yes = "$build_libtool_need_lc"; then -+ if test "$build_libtool_need_lc" = "yes"; then - func_append deplibs " -lc" - fi - ;; -@@ -9149,9 +7657,9 @@ - # I'm not sure if I'm treating the release correctly. I think - # release should show up in the -l (ie -lgmp5) so we don't want to - # add it in twice. Is that correct? -- release= -- versuffix= -- major= -+ release="" -+ versuffix="" -+ major="" - newdeplibs= - droppeddeps=no - case $deplibs_check_method in -@@ -9180,20 +7688,20 @@ - -l*) - func_stripname -l '' "$i" - name=$func_stripname_result -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - func_append newdeplibs " $i" -- i= -+ i="" - ;; - esac - fi -- if test -n "$i"; then -+ if test -n "$i" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 -- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0; then -+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - func_append newdeplibs " $i" - else - droppeddeps=yes -@@ -9223,20 +7731,20 @@ - $opt_dry_run || $RM conftest - if $LTCC $LTCFLAGS -o conftest conftest.c $i; then - ldd_output=`ldd conftest` -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - func_append newdeplibs " $i" -- i= -+ i="" - ;; - esac - fi -- if test -n "$i"; then -+ if test -n "$i" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 -- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0; then -+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - func_append newdeplibs " $i" - else - droppeddeps=yes -@@ -9273,24 +7781,24 @@ - -l*) - func_stripname -l '' "$a_deplib" - name=$func_stripname_result -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - ;; - esac - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - if test -n 
"$file_magic_glob"; then - libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob` - else - libnameglob=$libname - fi -- test yes = "$want_nocaseglob" && nocaseglob=`shopt -p nocaseglob` -+ test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do -- if test yes = "$want_nocaseglob"; then -+ if test "$want_nocaseglob" = yes; then - shopt -s nocaseglob - potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` - $nocaseglob -@@ -9308,25 +7816,25 @@ - # We might still enter an endless loop, since a link - # loop can be closed while we follow links, - # but so what? -- potlib=$potent_lib -+ potlib="$potent_lib" - while test -h "$potlib" 2>/dev/null; do -- potliblink=`ls -ld $potlib | $SED 's/.* -> //'` -+ potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` - case $potliblink in -- [\\/]* | [A-Za-z]:[\\/]*) potlib=$potliblink;; -- *) potlib=`$ECHO "$potlib" | $SED 's|[^/]*$||'`"$potliblink";; -+ [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; -+ *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";; - esac - done - if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | - $SED -e 10q | - $EGREP "$file_magic_regex" > /dev/null; then - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - break 2 - fi - done - done - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - droppeddeps=yes - echo - $ECHO "*** Warning: linker path does not have real file for library $a_deplib." -@@ -9334,7 +7842,7 @@ - echo "*** you link to this library. But I can only do this if you have a" - echo "*** shared version of the library, which you do not appear to have" - echo "*** because I did check the linker path looking for a file starting" -- if test -z "$potlib"; then -+ if test -z "$potlib" ; then - $ECHO "*** with $libname but no candidates were found. (...for file magic test)" - else - $ECHO "*** with $libname and none of the candidates passed a file format test" -@@ -9357,30 +7865,30 @@ - -l*) - func_stripname -l '' "$a_deplib" - name=$func_stripname_result -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - ;; - esac - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do - potential_libs=`ls $i/$libname[.-]* 2>/dev/null` - for potent_lib in $potential_libs; do -- potlib=$potent_lib # see symlink-check above in file_magic test -+ potlib="$potent_lib" # see symlink-check above in file_magic test - if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ - $EGREP "$match_pattern_regex" > /dev/null; then - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - break 2 - fi - done - done - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - droppeddeps=yes - echo - $ECHO "*** Warning: linker path does not have real file for library $a_deplib." -@@ -9388,7 +7896,7 @@ - echo "*** you link to this library. But I can only do this if you have a" - echo "*** shared version of the library, which you do not appear to have" - echo "*** because I did check the linker path looking for a file starting" -- if test -z "$potlib"; then -+ if test -z "$potlib" ; then - $ECHO "*** with $libname but no candidates were found. 
(...for regex pattern test)" - else - $ECHO "*** with $libname and none of the candidates passed a file format test" -@@ -9404,18 +7912,18 @@ - done # Gone through all deplibs. - ;; - none | unknown | *) -- newdeplibs= -+ newdeplibs="" - tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'` -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -- for i in $predeps $postdeps; do -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then -+ for i in $predeps $postdeps ; do - # can't use Xsed below, because $i might contain '/' -- tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s|$i||"` -+ tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"` - done - fi - case $tmp_deplibs in - *[!\ \ ]*) - echo -- if test none = "$deplibs_check_method"; then -+ if test "X$deplibs_check_method" = "Xnone"; then - echo "*** Warning: inter-library dependencies are not supported in this platform." - else - echo "*** Warning: inter-library dependencies are not known to be supported." -@@ -9439,8 +7947,8 @@ - ;; - esac - -- if test yes = "$droppeddeps"; then -- if test yes = "$module"; then -+ if test "$droppeddeps" = yes; then -+ if test "$module" = yes; then - echo - echo "*** Warning: libtool could not satisfy all declared inter-library" - $ECHO "*** dependencies of module $libname. Therefore, libtool will create" -@@ -9449,12 +7957,12 @@ - if test -z "$global_symbol_pipe"; then - echo - echo "*** However, this would only work if libtool was able to extract symbol" -- echo "*** lists from a program, using 'nm' or equivalent, but libtool could" -+ echo "*** lists from a program, using \`nm' or equivalent, but libtool could" - echo "*** not find such a program. So, this module is probably useless." -- echo "*** 'nm' from GNU binutils and a full rebuild may help." -+ echo "*** \`nm' from GNU binutils and a full rebuild may help." - fi -- if test no = "$build_old_libs"; then -- oldlibs=$output_objdir/$libname.$libext -+ if test "$build_old_libs" = no; then -+ oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else -@@ -9465,14 +7973,14 @@ - echo "*** automatically added whenever a program is linked with this library" - echo "*** or is declared to -dlopen it." - -- if test no = "$allow_undefined"; then -+ if test "$allow_undefined" = no; then - echo - echo "*** Since this library must not contain undefined symbols," - echo "*** because either the platform does not support them or" - echo "*** it was explicitly requested with -no-undefined," - echo "*** libtool will only create a static version of it." -- if test no = "$build_old_libs"; then -- oldlibs=$output_objdir/$libname.$libext -+ if test "$build_old_libs" = no; then -+ oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else -@@ -9518,7 +8026,7 @@ - *) func_append new_libs " $deplib" ;; - esac - done -- deplibs=$new_libs -+ deplibs="$new_libs" - - # All the library-specific variables (install_libdir is set above). - library_names= -@@ -9526,25 +8034,25 @@ - dlname= - - # Test again, we may have decided not to build it any more -- if test yes = "$build_libtool_libs"; then -- # Remove $wl instances when linking with ld. -+ if test "$build_libtool_libs" = yes; then -+ # Remove ${wl} instances when linking with ld. - # FIXME: should test the right _cmds variable. 
- case $archive_cmds in - *\$LD\ *) wl= ;; - esac -- if test yes = "$hardcode_into_libs"; then -+ if test "$hardcode_into_libs" = yes; then - # Hardcode the library paths - hardcode_libdirs= - dep_rpath= -- rpath=$finalize_rpath -- test relink = "$opt_mode" || rpath=$compile_rpath$rpath -+ rpath="$finalize_rpath" -+ test "$opt_mode" != relink && rpath="$compile_rpath$rpath" - for libdir in $rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - func_replace_sysroot "$libdir" - libdir=$func_replace_sysroot_result - if test -z "$hardcode_libdirs"; then -- hardcode_libdirs=$libdir -+ hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in -@@ -9569,7 +8077,7 @@ - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then -- libdir=$hardcode_libdirs -+ libdir="$hardcode_libdirs" - eval "dep_rpath=\"$hardcode_libdir_flag_spec\"" - fi - if test -n "$runpath_var" && test -n "$perm_rpath"; then -@@ -9583,8 +8091,8 @@ - test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" - fi - -- shlibpath=$finalize_shlibpath -- test relink = "$opt_mode" || shlibpath=$compile_shlibpath$shlibpath -+ shlibpath="$finalize_shlibpath" -+ test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath" - if test -n "$shlibpath"; then - eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" - fi -@@ -9594,19 +8102,19 @@ - eval library_names=\"$library_names_spec\" - set dummy $library_names - shift -- realname=$1 -+ realname="$1" - shift - - if test -n "$soname_spec"; then - eval soname=\"$soname_spec\" - else -- soname=$realname -+ soname="$realname" - fi - if test -z "$dlname"; then - dlname=$soname - fi - -- lib=$output_objdir/$realname -+ lib="$output_objdir/$realname" - linknames= - for link - do -@@ -9620,7 +8128,7 @@ - delfiles= - if test -n "$export_symbols" && test -n "$include_expsyms"; then - $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" -- export_symbols=$output_objdir/$libname.uexp -+ export_symbols="$output_objdir/$libname.uexp" - func_append delfiles " $export_symbols" - fi - -@@ -9629,31 +8137,31 @@ - cygwin* | mingw* | cegcc*) - if test -n "$export_symbols" && test -z "$export_symbols_regex"; then - # exporting using user supplied symfile -- func_dll_def_p "$export_symbols" || { -+ if test "x`$SED 1q $export_symbols`" != xEXPORTS; then - # and it's NOT already a .def file. Must figure out - # which of the given symbols are data symbols and tag - # them as such. So, trigger use of export_symbols_cmds. - # export_symbols gets reassigned inside the "prepare - # the list of exported symbols" if statement, so the - # include_expsyms logic still works. 
-- orig_export_symbols=$export_symbols -+ orig_export_symbols="$export_symbols" - export_symbols= - always_export_symbols=yes -- } -+ fi - fi - ;; - esac - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then -- if test yes = "$always_export_symbols" || test -n "$export_symbols_regex"; then -- func_verbose "generating symbol list for '$libname.la'" -- export_symbols=$output_objdir/$libname.exp -+ if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then -+ func_verbose "generating symbol list for \`$libname.la'" -+ export_symbols="$output_objdir/$libname.exp" - $opt_dry_run || $RM $export_symbols - cmds=$export_symbols_cmds -- save_ifs=$IFS; IFS='~' -+ save_ifs="$IFS"; IFS='~' - for cmd1 in $cmds; do -- IFS=$save_ifs -+ IFS="$save_ifs" - # Take the normal branch if the nm_file_list_spec branch - # doesn't work or if tool conversion is not needed. - case $nm_file_list_spec~$to_tool_file_cmd in -@@ -9667,7 +8175,7 @@ - try_normal_branch=no - ;; - esac -- if test yes = "$try_normal_branch" \ -+ if test "$try_normal_branch" = yes \ - && { test "$len" -lt "$max_cmd_len" \ - || test "$max_cmd_len" -le -1; } - then -@@ -9678,7 +8186,7 @@ - output_la=$func_basename_result - save_libobjs=$libobjs - save_output=$output -- output=$output_objdir/$output_la.nm -+ output=${output_objdir}/${output_la}.nm - func_to_tool_file "$output" - libobjs=$nm_file_list_spec$func_to_tool_file_result - func_append delfiles " $output" -@@ -9701,8 +8209,8 @@ - break - fi - done -- IFS=$save_ifs -- if test -n "$export_symbols_regex" && test : != "$skipped_export"; then -+ IFS="$save_ifs" -+ if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then - func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' - func_show_eval '$MV "${export_symbols}T" "$export_symbols"' - fi -@@ -9710,16 +8218,16 @@ - fi - - if test -n "$export_symbols" && test -n "$include_expsyms"; then -- tmp_export_symbols=$export_symbols -- test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols -+ tmp_export_symbols="$export_symbols" -+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" - $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' - fi - -- if test : != "$skipped_export" && test -n "$orig_export_symbols"; then -+ if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then - # The given exports_symbols file has to be filtered, so filter it. -- func_verbose "filter symbol list for '$libname.la' to tag DATA exports" -+ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" - # FIXME: $output_objdir/$libname.filter potentially contains lots of -- # 's' commands, which not all seds can handle. GNU sed should be fine -+ # 's' commands which not all seds can handle. GNU sed should be fine - # though. Also, the filter scales superlinearly with the number of - # global variables. join(1) would be nice here, but unfortunately - # isn't a blessed tool. -@@ -9738,11 +8246,11 @@ - ;; - esac - done -- deplibs=$tmp_deplibs -+ deplibs="$tmp_deplibs" - - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec" && -- test yes = "$compiler_needs_object" && -+ test "$compiler_needs_object" = yes && - test -z "$libobjs"; then - # extract the archives, so we have objects to list. - # TODO: could optimize this to just extract one archive. 
-@@ -9753,7 +8261,7 @@ - eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - test "X$libobjs" = "X " && libobjs= - else -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $convenience -@@ -9762,18 +8270,18 @@ - fi - fi - -- if test yes = "$thread_safe" && test -n "$thread_safe_flag_spec"; then -+ if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then - eval flag=\"$thread_safe_flag_spec\" - func_append linker_flags " $flag" - fi - - # Make a backup of the uninstalled library when relinking -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? - fi - - # Do each of the archive commands. -- if test yes = "$module" && test -n "$module_cmds"; then -+ if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - eval test_cmds=\"$module_expsym_cmds\" - cmds=$module_expsym_cmds -@@ -9791,7 +8299,7 @@ - fi - fi - -- if test : != "$skipped_export" && -+ if test "X$skipped_export" != "X:" && - func_len " $test_cmds" && - len=$func_len_result && - test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then -@@ -9824,8 +8332,8 @@ - last_robj= - k=1 - -- if test -n "$save_libobjs" && test : != "$skipped_export" && test yes = "$with_gnu_ld"; then -- output=$output_objdir/$output_la.lnkscript -+ if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then -+ output=${output_objdir}/${output_la}.lnkscript - func_verbose "creating GNU ld script: $output" - echo 'INPUT (' > $output - for obj in $save_libobjs -@@ -9837,14 +8345,14 @@ - func_append delfiles " $output" - func_to_tool_file "$output" - output=$func_to_tool_file_result -- elif test -n "$save_libobjs" && test : != "$skipped_export" && test -n "$file_list_spec"; then -- output=$output_objdir/$output_la.lnk -+ elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then -+ output=${output_objdir}/${output_la}.lnk - func_verbose "creating linker input file list: $output" - : > $output - set x $save_libobjs - shift - firstobj= -- if test yes = "$compiler_needs_object"; then -+ if test "$compiler_needs_object" = yes; then - firstobj="$1 " - shift - fi -@@ -9859,7 +8367,7 @@ - else - if test -n "$save_libobjs"; then - func_verbose "creating reloadable object files..." -- output=$output_objdir/$output_la-$k.$objext -+ output=$output_objdir/$output_la-${k}.$objext - eval test_cmds=\"$reload_cmds\" - func_len " $test_cmds" - len0=$func_len_result -@@ -9871,13 +8379,13 @@ - func_len " $obj" - func_arith $len + $func_len_result - len=$func_arith_result -- if test -z "$objlist" || -+ if test "X$objlist" = X || - test "$len" -lt "$max_cmd_len"; then - func_append objlist " $obj" - else - # The command $test_cmds is almost too long, add a - # command to the queue. -- if test 1 -eq "$k"; then -+ if test "$k" -eq 1 ; then - # The first file doesn't have a previous command to add. 
- reload_objs=$objlist - eval concat_cmds=\"$reload_cmds\" -@@ -9887,10 +8395,10 @@ - reload_objs="$objlist $last_robj" - eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\" - fi -- last_robj=$output_objdir/$output_la-$k.$objext -+ last_robj=$output_objdir/$output_la-${k}.$objext - func_arith $k + 1 - k=$func_arith_result -- output=$output_objdir/$output_la-$k.$objext -+ output=$output_objdir/$output_la-${k}.$objext - objlist=" $obj" - func_len " $last_robj" - func_arith $len0 + $func_len_result -@@ -9902,9 +8410,9 @@ - # files will link in the last one created. - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - reload_objs="$objlist $last_robj" -- eval concat_cmds=\"\$concat_cmds$reload_cmds\" -+ eval concat_cmds=\"\${concat_cmds}$reload_cmds\" - if test -n "$last_robj"; then -- eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" -+ eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" - fi - func_append delfiles " $output" - -@@ -9912,9 +8420,9 @@ - output= - fi - -- ${skipped_export-false} && { -- func_verbose "generating symbol list for '$libname.la'" -- export_symbols=$output_objdir/$libname.exp -+ if ${skipped_export-false}; then -+ func_verbose "generating symbol list for \`$libname.la'" -+ export_symbols="$output_objdir/$libname.exp" - $opt_dry_run || $RM $export_symbols - libobjs=$output - # Append the command to create the export file. -@@ -9923,16 +8431,16 @@ - if test -n "$last_robj"; then - eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" - fi -- } -+ fi - - test -n "$save_libobjs" && - func_verbose "creating a temporary reloadable object file: $output" - - # Loop through the commands generated above and execute them. -- save_ifs=$IFS; IFS='~' -+ save_ifs="$IFS"; IFS='~' - for cmd in $concat_cmds; do -- IFS=$save_ifs -- $opt_quiet || { -+ IFS="$save_ifs" -+ $opt_silent || { - func_quote_for_expand "$cmd" - eval "func_echo $func_quote_for_expand_result" - } -@@ -9940,7 +8448,7 @@ - lt_exit=$? - - # Restore the uninstalled library and exit -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) -@@ -9949,7 +8457,7 @@ - exit $lt_exit - } - done -- IFS=$save_ifs -+ IFS="$save_ifs" - - if test -n "$export_symbols_regex" && ${skipped_export-false}; then - func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' -@@ -9957,18 +8465,18 @@ - fi - fi - -- ${skipped_export-false} && { -+ if ${skipped_export-false}; then - if test -n "$export_symbols" && test -n "$include_expsyms"; then -- tmp_export_symbols=$export_symbols -- test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols -+ tmp_export_symbols="$export_symbols" -+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" - $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' - fi - - if test -n "$orig_export_symbols"; then - # The given exports_symbols file has to be filtered, so filter it. -- func_verbose "filter symbol list for '$libname.la' to tag DATA exports" -+ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" - # FIXME: $output_objdir/$libname.filter potentially contains lots of -- # 's' commands, which not all seds can handle. GNU sed should be fine -+ # 's' commands which not all seds can handle. GNU sed should be fine - # though. Also, the filter scales superlinearly with the number of - # global variables. 
join(1) would be nice here, but unfortunately - # isn't a blessed tool. -@@ -9977,7 +8485,7 @@ - export_symbols=$output_objdir/$libname.def - $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols - fi -- } -+ fi - - libobjs=$output - # Restore the value of output. -@@ -9991,7 +8499,7 @@ - # value of $libobjs for piecewise linking. - - # Do each of the archive commands. -- if test yes = "$module" && test -n "$module_cmds"; then -+ if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - cmds=$module_expsym_cmds - else -@@ -10013,7 +8521,7 @@ - - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $dlprefiles -@@ -10021,12 +8529,11 @@ - test "X$libobjs" = "X " && libobjs= - fi - -- save_ifs=$IFS; IFS='~' -+ save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do -- IFS=$sp$nl -+ IFS="$save_ifs" - eval cmd=\"$cmd\" -- IFS=$save_ifs -- $opt_quiet || { -+ $opt_silent || { - func_quote_for_expand "$cmd" - eval "func_echo $func_quote_for_expand_result" - } -@@ -10034,7 +8541,7 @@ - lt_exit=$? - - # Restore the uninstalled library and exit -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) -@@ -10043,10 +8550,10 @@ - exit $lt_exit - } - done -- IFS=$save_ifs -+ IFS="$save_ifs" - - # Restore the uninstalled library and exit -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? - - if test -n "$convenience"; then -@@ -10066,39 +8573,39 @@ - done - - # If -module or -export-dynamic was specified, set the dlname. -- if test yes = "$module" || test yes = "$export_dynamic"; then -+ if test "$module" = yes || test "$export_dynamic" = yes; then - # On all known operating systems, these are identical. 
-- dlname=$soname -+ dlname="$soname" - fi - fi - ;; - - obj) -- if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then -- func_warning "'-dlopen' is ignored for objects" -+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then -+ func_warning "\`-dlopen' is ignored for objects" - fi - - case " $deplibs" in - *\ -l* | *\ -L*) -- func_warning "'-l' and '-L' are ignored for objects" ;; -+ func_warning "\`-l' and \`-L' are ignored for objects" ;; - esac - - test -n "$rpath" && \ -- func_warning "'-rpath' is ignored for objects" -+ func_warning "\`-rpath' is ignored for objects" - - test -n "$xrpath" && \ -- func_warning "'-R' is ignored for objects" -+ func_warning "\`-R' is ignored for objects" - - test -n "$vinfo" && \ -- func_warning "'-version-info' is ignored for objects" -+ func_warning "\`-version-info' is ignored for objects" - - test -n "$release" && \ -- func_warning "'-release' is ignored for objects" -+ func_warning "\`-release' is ignored for objects" - - case $output in - *.lo) - test -n "$objs$old_deplibs" && \ -- func_fatal_error "cannot build library object '$output' from non-libtool objects" -+ func_fatal_error "cannot build library object \`$output' from non-libtool objects" - - libobj=$output - func_lo2o "$libobj" -@@ -10106,7 +8613,7 @@ - ;; - *) - libobj= -- obj=$output -+ obj="$output" - ;; - esac - -@@ -10119,19 +8626,17 @@ - # the extraction. - reload_conv_objs= - gentop= -- # if reload_cmds runs $LD directly, get rid of -Wl from -- # whole_archive_flag_spec and hope we can get by with turning comma -- # into space. -- case $reload_cmds in -- *\$LD[\ \$]*) wl= ;; -- esac -+ # reload_cmds runs $LD directly, so let us get rid of -+ # -Wl from whole_archive_flag_spec and hope we can get by with -+ # turning comma into space.. -+ wl= -+ - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec"; then - eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" -- test -n "$wl" || tmp_whole_archive_flags=`$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` -- reload_conv_objs=$reload_objs\ $tmp_whole_archive_flags -+ reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` - else -- gentop=$output_objdir/${obj}x -+ gentop="$output_objdir/${obj}x" - func_append generated " $gentop" - - func_extract_archives $gentop $convenience -@@ -10140,12 +8645,12 @@ - fi - - # If we're not building shared, we need to use non_pic_objs -- test yes = "$build_libtool_libs" || libobjs=$non_pic_objects -+ test "$build_libtool_libs" != yes && libobjs="$non_pic_objects" - - # Create the old-style object. -- reload_objs=$objs$old_deplibs' '`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; /\.lib$/d; $lo2o" | $NL2SP`' '$reload_conv_objs -+ reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test - -- output=$obj -+ output="$obj" - func_execute_cmds "$reload_cmds" 'exit $?' - - # Exit if we aren't doing a library object file. -@@ -10157,7 +8662,7 @@ - exit $EXIT_SUCCESS - fi - -- test yes = "$build_libtool_libs" || { -+ if test "$build_libtool_libs" != yes; then - if test -n "$gentop"; then - func_show_eval '${RM}r "$gentop"' - fi -@@ -10167,12 +8672,12 @@ - # $show "echo timestamp > $libobj" - # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? 
- exit $EXIT_SUCCESS -- } -+ fi - -- if test -n "$pic_flag" || test default != "$pic_mode"; then -+ if test -n "$pic_flag" || test "$pic_mode" != default; then - # Only do commands if we really have different PIC objects. - reload_objs="$libobjs $reload_conv_objs" -- output=$libobj -+ output="$libobj" - func_execute_cmds "$reload_cmds" 'exit $?' - fi - -@@ -10189,14 +8694,16 @@ - output=$func_stripname_result.exe;; - esac - test -n "$vinfo" && \ -- func_warning "'-version-info' is ignored for programs" -+ func_warning "\`-version-info' is ignored for programs" - - test -n "$release" && \ -- func_warning "'-release' is ignored for programs" -+ func_warning "\`-release' is ignored for programs" - -- $preload \ -- && test unknown,unknown,unknown = "$dlopen_support,$dlopen_self,$dlopen_self_static" \ -- && func_warning "'LT_INIT([dlopen])' not used. Assuming no dlopen support." -+ test "$preload" = yes \ -+ && test "$dlopen_support" = unknown \ -+ && test "$dlopen_self" = unknown \ -+ && test "$dlopen_self_static" = unknown && \ -+ func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support." - - case $host in - *-*-rhapsody* | *-*-darwin1.[012]) -@@ -10210,11 +8717,11 @@ - *-*-darwin*) - # Don't allow lazy linking, it breaks C++ global constructors - # But is supposedly fixed on 10.4 or later (yay!). -- if test CXX = "$tagname"; then -+ if test "$tagname" = CXX ; then - case ${MACOSX_DEPLOYMENT_TARGET-10.0} in - 10.[0123]) -- func_append compile_command " $wl-bind_at_load" -- func_append finalize_command " $wl-bind_at_load" -+ func_append compile_command " ${wl}-bind_at_load" -+ func_append finalize_command " ${wl}-bind_at_load" - ;; - esac - fi -@@ -10250,7 +8757,7 @@ - *) func_append new_libs " $deplib" ;; - esac - done -- compile_deplibs=$new_libs -+ compile_deplibs="$new_libs" - - - func_append compile_command " $compile_deplibs" -@@ -10274,7 +8781,7 @@ - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then -- hardcode_libdirs=$libdir -+ hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in -@@ -10297,7 +8804,7 @@ - fi - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) -- testbindir=`$ECHO "$libdir" | $SED -e 's*/lib$*/bin*'` -+ testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'` - case :$dllsearchpath: in - *":$libdir:"*) ;; - ::) dllsearchpath=$libdir;; -@@ -10314,10 +8821,10 @@ - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then -- libdir=$hardcode_libdirs -+ libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi -- compile_rpath=$rpath -+ compile_rpath="$rpath" - - rpath= - hardcode_libdirs= -@@ -10325,7 +8832,7 @@ - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then -- hardcode_libdirs=$libdir -+ hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in -@@ -10350,43 +8857,45 @@ - # Substitute the hardcoded libdirs into the rpath. 
- if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then -- libdir=$hardcode_libdirs -+ libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi -- finalize_rpath=$rpath -+ finalize_rpath="$rpath" - -- if test -n "$libobjs" && test yes = "$build_old_libs"; then -+ if test -n "$libobjs" && test "$build_old_libs" = yes; then - # Transform all the library objects into standard objects. - compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP` - finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP` - fi - -- func_generate_dlsyms "$outputname" "@PROGRAM@" false -+ func_generate_dlsyms "$outputname" "@PROGRAM@" "no" - - # template prelinking step - if test -n "$prelink_cmds"; then - func_execute_cmds "$prelink_cmds" 'exit $?' - fi - -- wrappers_required=: -+ wrappers_required=yes - case $host in - *cegcc* | *mingw32ce*) - # Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway. -- wrappers_required=false -+ wrappers_required=no - ;; - *cygwin* | *mingw* ) -- test yes = "$build_libtool_libs" || wrappers_required=false -+ if test "$build_libtool_libs" != yes; then -+ wrappers_required=no -+ fi - ;; - *) -- if test no = "$need_relink" || test yes != "$build_libtool_libs"; then -- wrappers_required=false -+ if test "$need_relink" = no || test "$build_libtool_libs" != yes; then -+ wrappers_required=no - fi - ;; - esac -- $wrappers_required || { -+ if test "$wrappers_required" = no; then - # Replace the output file specification. - compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'` -- link_command=$compile_command$compile_rpath -+ link_command="$compile_command$compile_rpath" - - # We have no uninstalled library dependencies, so finalize right now. - exit_status=0 -@@ -10399,12 +8908,12 @@ - fi - - # Delete the generated files. -- if test -f "$output_objdir/${outputname}S.$objext"; then -- func_show_eval '$RM "$output_objdir/${outputname}S.$objext"' -+ if test -f "$output_objdir/${outputname}S.${objext}"; then -+ func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' - fi - - exit $exit_status -- } -+ fi - - if test -n "$compile_shlibpath$finalize_shlibpath"; then - compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" -@@ -10434,9 +8943,9 @@ - fi - fi - -- if test yes = "$no_install"; then -+ if test "$no_install" = yes; then - # We don't need to create a wrapper script. -- link_command=$compile_var$compile_command$compile_rpath -+ link_command="$compile_var$compile_command$compile_rpath" - # Replace the output file specification. - link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'` - # Delete the old output file. 
-@@ -10453,28 +8962,27 @@ - exit $EXIT_SUCCESS - fi - -- case $hardcode_action,$fast_install in -- relink,*) -- # Fast installation is not supported -- link_command=$compile_var$compile_command$compile_rpath -- relink_command=$finalize_var$finalize_command$finalize_rpath -- -- func_warning "this platform does not like uninstalled shared libraries" -- func_warning "'$output' will be relinked during installation" -- ;; -- *,yes) -- link_command=$finalize_var$compile_command$finalize_rpath -- relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'` -- ;; -- *,no) -- link_command=$compile_var$compile_command$compile_rpath -- relink_command=$finalize_var$finalize_command$finalize_rpath -- ;; -- *,needless) -- link_command=$finalize_var$compile_command$finalize_rpath -- relink_command= -- ;; -- esac -+ if test "$hardcode_action" = relink; then -+ # Fast installation is not supported -+ link_command="$compile_var$compile_command$compile_rpath" -+ relink_command="$finalize_var$finalize_command$finalize_rpath" -+ -+ func_warning "this platform does not like uninstalled shared libraries" -+ func_warning "\`$output' will be relinked during installation" -+ else -+ if test "$fast_install" != no; then -+ link_command="$finalize_var$compile_command$finalize_rpath" -+ if test "$fast_install" = yes; then -+ relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'` -+ else -+ # fast_install is set to needless -+ relink_command= -+ fi -+ else -+ link_command="$compile_var$compile_command$compile_rpath" -+ relink_command="$finalize_var$finalize_command$finalize_rpath" -+ fi -+ fi - - # Replace the output file specification. - link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` -@@ -10531,8 +9039,8 @@ - func_dirname_and_basename "$output" "" "." - output_name=$func_basename_result - output_path=$func_dirname_result -- cwrappersource=$output_path/$objdir/lt-$output_name.c -- cwrapper=$output_path/$output_name.exe -+ cwrappersource="$output_path/$objdir/lt-$output_name.c" -+ cwrapper="$output_path/$output_name.exe" - $RM $cwrappersource $cwrapper - trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 - -@@ -10553,7 +9061,7 @@ - trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15 - $opt_dry_run || { - # note: this script will not be executed, so do not chmod. -- if test "x$build" = "x$host"; then -+ if test "x$build" = "x$host" ; then - $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result - else - func_emit_wrapper no > $func_ltwrapper_scriptname_result -@@ -10576,27 +9084,25 @@ - # See if we need to build an old-fashioned archive. 
- for oldlib in $oldlibs; do - -- case $build_libtool_libs in -- convenience) -- oldobjs="$libobjs_save $symfileobj" -- addlibs=$convenience -- build_libtool_libs=no -- ;; -- module) -- oldobjs=$libobjs_save -- addlibs=$old_convenience -+ if test "$build_libtool_libs" = convenience; then -+ oldobjs="$libobjs_save $symfileobj" -+ addlibs="$convenience" -+ build_libtool_libs=no -+ else -+ if test "$build_libtool_libs" = module; then -+ oldobjs="$libobjs_save" - build_libtool_libs=no -- ;; -- *) -+ else - oldobjs="$old_deplibs $non_pic_objects" -- $preload && test -f "$symfileobj" \ -- && func_append oldobjs " $symfileobj" -- addlibs=$old_convenience -- ;; -- esac -+ if test "$preload" = yes && test -f "$symfileobj"; then -+ func_append oldobjs " $symfileobj" -+ fi -+ fi -+ addlibs="$old_convenience" -+ fi - - if test -n "$addlibs"; then -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $addlibs -@@ -10604,13 +9110,13 @@ - fi - - # Do each command in the archive commands. -- if test -n "$old_archive_from_new_cmds" && test yes = "$build_libtool_libs"; then -+ if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then - cmds=$old_archive_from_new_cmds - else - - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $dlprefiles -@@ -10631,7 +9137,7 @@ - : - else - echo "copying selected object files to avoid basename conflicts..." -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - func_mkdir_p "$gentop" - save_oldobjs=$oldobjs -@@ -10640,7 +9146,7 @@ - for obj in $save_oldobjs - do - func_basename "$obj" -- objbase=$func_basename_result -+ objbase="$func_basename_result" - case " $oldobjs " in - " ") oldobjs=$obj ;; - *[\ /]"$objbase "*) -@@ -10709,18 +9215,18 @@ - else - # the above command should be used before it gets too long - oldobjs=$objlist -- if test "$obj" = "$last_oldobj"; then -+ if test "$obj" = "$last_oldobj" ; then - RANLIB=$save_RANLIB - fi - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ -- eval concat_cmds=\"\$concat_cmds$old_archive_cmds\" -+ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" - objlist= - len=$len0 - fi - done - RANLIB=$save_RANLIB - oldobjs=$objlist -- if test -z "$oldobjs"; then -+ if test "X$oldobjs" = "X" ; then - eval cmds=\"\$concat_cmds\" - else - eval cmds=\"\$concat_cmds~\$old_archive_cmds\" -@@ -10737,7 +9243,7 @@ - case $output in - *.la) - old_library= -- test yes = "$build_old_libs" && old_library=$libname.$libext -+ test "$build_old_libs" = yes && old_library="$libname.$libext" - func_verbose "creating $output" - - # Preserve any variables that may affect compiler behavior -@@ -10752,31 +9258,31 @@ - fi - done - # Quote the link command for shipping. -- relink_command="(cd `pwd`; $SHELL \"$progpath\" $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" -+ relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" - relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` -- if test yes = "$hardcode_automatic"; then -+ if test "$hardcode_automatic" = yes ; then - relink_command= - fi - - # Only create the output if not a dry run. 
- $opt_dry_run || { - for installed in no yes; do -- if test yes = "$installed"; then -+ if test "$installed" = yes; then - if test -z "$install_libdir"; then - break - fi -- output=$output_objdir/${outputname}i -+ output="$output_objdir/$outputname"i - # Replace all uninstalled libtool libraries with the installed ones - newdependency_libs= - for deplib in $dependency_libs; do - case $deplib in - *.la) - func_basename "$deplib" -- name=$func_basename_result -+ name="$func_basename_result" - func_resolve_sysroot "$deplib" -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` - test -z "$libdir" && \ -- func_fatal_error "'$deplib' is not a valid libtool archive" -+ func_fatal_error "\`$deplib' is not a valid libtool archive" - func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name" - ;; - -L*) -@@ -10792,23 +9298,23 @@ - *) func_append newdependency_libs " $deplib" ;; - esac - done -- dependency_libs=$newdependency_libs -+ dependency_libs="$newdependency_libs" - newdlfiles= - - for lib in $dlfiles; do - case $lib in - *.la) - func_basename "$lib" -- name=$func_basename_result -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib` -+ name="$func_basename_result" -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - test -z "$libdir" && \ -- func_fatal_error "'$lib' is not a valid libtool archive" -+ func_fatal_error "\`$lib' is not a valid libtool archive" - func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name" - ;; - *) func_append newdlfiles " $lib" ;; - esac - done -- dlfiles=$newdlfiles -+ dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - case $lib in -@@ -10818,34 +9324,34 @@ - # didn't already link the preopened objects directly into - # the library: - func_basename "$lib" -- name=$func_basename_result -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib` -+ name="$func_basename_result" -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - test -z "$libdir" && \ -- func_fatal_error "'$lib' is not a valid libtool archive" -+ func_fatal_error "\`$lib' is not a valid libtool archive" - func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name" - ;; - esac - done -- dlprefiles=$newdlprefiles -+ dlprefiles="$newdlprefiles" - else - newdlfiles= - for lib in $dlfiles; do - case $lib in -- [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;; -+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - func_append newdlfiles " $abs" - done -- dlfiles=$newdlfiles -+ dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - case $lib in -- [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;; -+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - func_append newdlprefiles " $abs" - done -- dlprefiles=$newdlprefiles -+ dlprefiles="$newdlprefiles" - fi - $RM $output - # place dlname in correct position for cygwin -@@ -10861,9 +9367,10 @@ - case $host,$output,$installed,$module,$dlname in - *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll) - # If a -bindir argument was supplied, place the dll there. -- if test -n "$bindir"; then -+ if test "x$bindir" != x ; -+ then - func_relative_path "$install_libdir" "$bindir" -- tdlname=$func_relative_path_result/$dlname -+ tdlname=$func_relative_path_result$dlname - else - # Otherwise fall back on heuristic. 
- tdlname=../bin/$dlname -@@ -10872,7 +9379,7 @@ - esac - $ECHO > $output "\ - # $outputname - a libtool library file --# Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - # - # Please DO NOT delete this file! - # It is necessary for linking the library. -@@ -10886,7 +9393,7 @@ - # The name of the static archive. - old_library='$old_library' - --# Linker flags that cannot go in dependency_libs. -+# Linker flags that can not go in dependency_libs. - inherited_linker_flags='$new_inherited_linker_flags' - - # Libraries that this one depends upon. -@@ -10912,7 +9419,7 @@ - - # Directory that this library needs to be installed in: - libdir='$install_libdir'" -- if test no,yes = "$installed,$need_relink"; then -+ if test "$installed" = no && test "$need_relink" = yes; then - $ECHO >> $output "\ - relink_command=\"$relink_command\"" - fi -@@ -10927,29 +9434,27 @@ - exit $EXIT_SUCCESS - } - --if test link = "$opt_mode" || test relink = "$opt_mode"; then -- func_mode_link ${1+"$@"} --fi -+{ test "$opt_mode" = link || test "$opt_mode" = relink; } && -+ func_mode_link ${1+"$@"} - - - # func_mode_uninstall arg... - func_mode_uninstall () - { -- $debug_cmd -- -- RM=$nonopt -+ $opt_debug -+ RM="$nonopt" - files= -- rmforce=false -+ rmforce= - exit_status=0 - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. -- libtool_install_magic=$magic -+ libtool_install_magic="$magic" - - for arg - do - case $arg in -- -f) func_append RM " $arg"; rmforce=: ;; -+ -f) func_append RM " $arg"; rmforce=yes ;; - -*) func_append RM " $arg" ;; - *) func_append files " $arg" ;; - esac -@@ -10962,18 +9467,18 @@ - - for file in $files; do - func_dirname "$file" "" "." -- dir=$func_dirname_result -- if test . = "$dir"; then -- odir=$objdir -+ dir="$func_dirname_result" -+ if test "X$dir" = X.; then -+ odir="$objdir" - else -- odir=$dir/$objdir -+ odir="$dir/$objdir" - fi - func_basename "$file" -- name=$func_basename_result -- test uninstall = "$opt_mode" && odir=$dir -+ name="$func_basename_result" -+ test "$opt_mode" = uninstall && odir="$dir" - - # Remember odir for removal later, being careful to avoid duplicates -- if test clean = "$opt_mode"; then -+ if test "$opt_mode" = clean; then - case " $rmdirs " in - *" $odir "*) ;; - *) func_append rmdirs " $odir" ;; -@@ -10988,11 +9493,11 @@ - elif test -d "$file"; then - exit_status=1 - continue -- elif $rmforce; then -+ elif test "$rmforce" = yes; then - continue - fi - -- rmfiles=$file -+ rmfiles="$file" - - case $name in - *.la) -@@ -11006,7 +9511,7 @@ - done - test -n "$old_library" && func_append rmfiles " $odir/$old_library" - -- case $opt_mode in -+ case "$opt_mode" in - clean) - case " $library_names " in - *" $dlname "*) ;; -@@ -11017,12 +9522,12 @@ - uninstall) - if test -n "$library_names"; then - # Do each command in the postuninstall commands. -- func_execute_cmds "$postuninstall_cmds" '$rmforce || exit_status=1' -+ func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' - fi - - if test -n "$old_library"; then - # Do each command in the old_postuninstall commands. -- func_execute_cmds "$old_postuninstall_cmds" '$rmforce || exit_status=1' -+ func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' - fi - # FIXME: should reinstall the best remaining shared library. - ;; -@@ -11038,19 +9543,21 @@ - func_source $dir/$name - - # Add PIC object to the list of files to remove. 
-- if test -n "$pic_object" && test none != "$pic_object"; then -+ if test -n "$pic_object" && -+ test "$pic_object" != none; then - func_append rmfiles " $dir/$pic_object" - fi - - # Add non-PIC object to the list of files to remove. -- if test -n "$non_pic_object" && test none != "$non_pic_object"; then -+ if test -n "$non_pic_object" && -+ test "$non_pic_object" != none; then - func_append rmfiles " $dir/$non_pic_object" - fi - fi - ;; - - *) -- if test clean = "$opt_mode"; then -+ if test "$opt_mode" = clean ; then - noexename=$name - case $file in - *.exe) -@@ -11077,12 +9584,12 @@ - - # note $name still contains .exe if it was in $file originally - # as does the version of $file that was added into $rmfiles -- func_append rmfiles " $odir/$name $odir/${name}S.$objext" -- if test yes = "$fast_install" && test -n "$relink_command"; then -+ func_append rmfiles " $odir/$name $odir/${name}S.${objext}" -+ if test "$fast_install" = yes && test -n "$relink_command"; then - func_append rmfiles " $odir/lt-$name" - fi -- if test "X$noexename" != "X$name"; then -- func_append rmfiles " $odir/lt-$noexename.c" -+ if test "X$noexename" != "X$name" ; then -+ func_append rmfiles " $odir/lt-${noexename}.c" - fi - fi - fi -@@ -11091,7 +9598,7 @@ - func_show_eval "$RM $rmfiles" 'exit_status=1' - done - -- # Try to remove the $objdir's in the directories where we deleted files -+ # Try to remove the ${objdir}s in the directories where we deleted files - for dir in $rmdirs; do - if test -d "$dir"; then - func_show_eval "rmdir $dir >/dev/null 2>&1" -@@ -11101,17 +9608,16 @@ - exit $exit_status - } - --if test uninstall = "$opt_mode" || test clean = "$opt_mode"; then -- func_mode_uninstall ${1+"$@"} --fi -+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } && -+ func_mode_uninstall ${1+"$@"} - - test -z "$opt_mode" && { -- help=$generic_help -+ help="$generic_help" - func_fatal_help "you must specify a MODE" - } - - test -z "$exec_cmd" && \ -- func_fatal_help "invalid operation mode '$opt_mode'" -+ func_fatal_help "invalid operation mode \`$opt_mode'" - - if test -n "$exec_cmd"; then - eval exec "$exec_cmd" -@@ -11122,7 +9628,7 @@ - - - # The TAGs below are defined such that we never get into a situation --# where we disable both kinds of libraries. Given conflicting -+# in which we disable both kinds of libraries. Given conflicting - # choices, we go for a static library, that is the most portable, - # since we can't tell whether shared libraries were disabled because - # the user asked for that or because the platform doesn't support -@@ -11145,3 +9651,5 @@ - # mode:shell-script - # sh-indentation:2 - # End: -+# vi:sw=2 -+ -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/lt~obsolete.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/lt~obsolete.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/lt~obsolete.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/lt~obsolete.m4 2020-07-16 10:48:35.382806000 +0200 -@@ -1,7 +1,6 @@ - # lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*- - # --# Copyright (C) 2004-2005, 2007, 2009, 2011-2015 Free Software --# Foundation, Inc. -+# Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc. - # Written by Scott James Remnant, 2004. - # - # This file is free software; the Free Software Foundation gives -@@ -12,7 +11,7 @@ - - # These exist entirely to fool aclocal when bootstrapping libtool. 
- # --# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN), -+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN) - # which have later been changed to m4_define as they aren't part of the - # exported API, or moved to Autoconf or Automake where they belong. - # -@@ -26,7 +25,7 @@ - # included after everything else. This provides aclocal with the - # AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything - # because those macros already exist, or will be overwritten later. --# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. -+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. - # - # Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here. - # Yes, that means every name once taken will need to remain here until -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltoptions.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltoptions.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltoptions.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltoptions.m4 2020-07-16 10:48:35.379441667 +0200 -@@ -1,14 +1,14 @@ - # Helper functions for option handling. -*- Autoconf -*- - # --# Copyright (C) 2004-2005, 2007-2009, 2011-2015 Free Software --# Foundation, Inc. -+# Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, -+# Inc. - # Written by Gary V. Vaughan, 2004 - # - # This file is free software; the Free Software Foundation gives - # unlimited permission to copy and/or distribute it, with or without - # modifications, as long as this notice is preserved. - --# serial 8 ltoptions.m4 -+# serial 7 ltoptions.m4 - - # This is to help aclocal find these macros, as it can't see m4_define. - AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) -@@ -29,7 +29,7 @@ - [m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl - m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]), - _LT_MANGLE_DEFUN([$1], [$2]), -- [m4_warning([Unknown $1 option '$2'])])[]dnl -+ [m4_warning([Unknown $1 option `$2'])])[]dnl - ]) - - -@@ -75,15 +75,13 @@ - dnl - dnl If no reference was made to various pairs of opposing options, then - dnl we run the default mode handler for the pair. 
For example, if neither -- dnl 'shared' nor 'disable-shared' was passed, we enable building of shared -+ dnl `shared' nor `disable-shared' was passed, we enable building of shared - dnl archives by default: - _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED]) - _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC]) - _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC]) - _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install], -- [_LT_ENABLE_FAST_INSTALL]) -- _LT_UNLESS_OPTIONS([LT_INIT], [aix-soname=aix aix-soname=both aix-soname=svr4], -- [_LT_WITH_AIX_SONAME([aix])]) -+ [_LT_ENABLE_FAST_INSTALL]) - ]) - ])# _LT_SET_OPTIONS - -@@ -114,7 +112,7 @@ - [_LT_SET_OPTION([LT_INIT], [dlopen]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you --put the 'dlopen' option into LT_INIT's first parameter.]) -+put the `dlopen' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -@@ -150,7 +148,7 @@ - _LT_SET_OPTION([LT_INIT], [win32-dll]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you --put the 'win32-dll' option into LT_INIT's first parameter.]) -+put the `win32-dll' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -@@ -159,9 +157,9 @@ - - # _LT_ENABLE_SHARED([DEFAULT]) - # ---------------------------- --# implement the --enable-shared flag, and supports the 'shared' and --# 'disable-shared' LT_INIT options. --# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. -+# implement the --enable-shared flag, and supports the `shared' and -+# `disable-shared' LT_INIT options. -+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. - m4_define([_LT_ENABLE_SHARED], - [m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl - AC_ARG_ENABLE([shared], -@@ -174,14 +172,14 @@ - *) - enable_shared=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_shared=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], - [enable_shared=]_LT_ENABLE_SHARED_DEFAULT) -@@ -213,9 +211,9 @@ - - # _LT_ENABLE_STATIC([DEFAULT]) - # ---------------------------- --# implement the --enable-static flag, and support the 'static' and --# 'disable-static' LT_INIT options. --# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. -+# implement the --enable-static flag, and support the `static' and -+# `disable-static' LT_INIT options. -+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. - m4_define([_LT_ENABLE_STATIC], - [m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl - AC_ARG_ENABLE([static], -@@ -228,14 +226,14 @@ - *) - enable_static=no - # Look at the argument we got. We use all the common list separators. 
-- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_static=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], - [enable_static=]_LT_ENABLE_STATIC_DEFAULT) -@@ -267,9 +265,9 @@ - - # _LT_ENABLE_FAST_INSTALL([DEFAULT]) - # ---------------------------------- --# implement the --enable-fast-install flag, and support the 'fast-install' --# and 'disable-fast-install' LT_INIT options. --# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. -+# implement the --enable-fast-install flag, and support the `fast-install' -+# and `disable-fast-install' LT_INIT options. -+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. - m4_define([_LT_ENABLE_FAST_INSTALL], - [m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl - AC_ARG_ENABLE([fast-install], -@@ -282,14 +280,14 @@ - *) - enable_fast_install=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_fast_install=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], - [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT) -@@ -306,14 +304,14 @@ - [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you put --the 'fast-install' option into LT_INIT's first parameter.]) -+the `fast-install' option into LT_INIT's first parameter.]) - ]) - - AU_DEFUN([AC_DISABLE_FAST_INSTALL], - [_LT_SET_OPTION([LT_INIT], [disable-fast-install]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you put --the 'disable-fast-install' option into LT_INIT's first parameter.]) -+the `disable-fast-install' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -@@ -321,64 +319,11 @@ - dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], []) - - --# _LT_WITH_AIX_SONAME([DEFAULT]) --# ---------------------------------- --# implement the --with-aix-soname flag, and support the `aix-soname=aix' --# and `aix-soname=both' and `aix-soname=svr4' LT_INIT options. DEFAULT --# is either `aix', `both' or `svr4'. If omitted, it defaults to `aix'. 
--m4_define([_LT_WITH_AIX_SONAME], --[m4_define([_LT_WITH_AIX_SONAME_DEFAULT], [m4_if($1, svr4, svr4, m4_if($1, both, both, aix))])dnl --shared_archive_member_spec= --case $host,$enable_shared in --power*-*-aix[[5-9]]*,yes) -- AC_MSG_CHECKING([which variant of shared library versioning to provide]) -- AC_ARG_WITH([aix-soname], -- [AS_HELP_STRING([--with-aix-soname=aix|svr4|both], -- [shared library versioning (aka "SONAME") variant to provide on AIX, @<:@default=]_LT_WITH_AIX_SONAME_DEFAULT[@:>@.])], -- [case $withval in -- aix|svr4|both) -- ;; -- *) -- AC_MSG_ERROR([Unknown argument to --with-aix-soname]) -- ;; -- esac -- lt_cv_with_aix_soname=$with_aix_soname], -- [AC_CACHE_VAL([lt_cv_with_aix_soname], -- [lt_cv_with_aix_soname=]_LT_WITH_AIX_SONAME_DEFAULT) -- with_aix_soname=$lt_cv_with_aix_soname]) -- AC_MSG_RESULT([$with_aix_soname]) -- if test aix != "$with_aix_soname"; then -- # For the AIX way of multilib, we name the shared archive member -- # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o', -- # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File. -- # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag, -- # the AIX toolchain works better with OBJECT_MODE set (default 32). -- if test 64 = "${OBJECT_MODE-32}"; then -- shared_archive_member_spec=shr_64 -- else -- shared_archive_member_spec=shr -- fi -- fi -- ;; --*) -- with_aix_soname=aix -- ;; --esac -- --_LT_DECL([], [shared_archive_member_spec], [0], -- [Shared archive member basename, for filename based shared library versioning on AIX])dnl --])# _LT_WITH_AIX_SONAME -- --LT_OPTION_DEFINE([LT_INIT], [aix-soname=aix], [_LT_WITH_AIX_SONAME([aix])]) --LT_OPTION_DEFINE([LT_INIT], [aix-soname=both], [_LT_WITH_AIX_SONAME([both])]) --LT_OPTION_DEFINE([LT_INIT], [aix-soname=svr4], [_LT_WITH_AIX_SONAME([svr4])]) -- -- - # _LT_WITH_PIC([MODE]) - # -------------------- --# implement the --with-pic flag, and support the 'pic-only' and 'no-pic' -+# implement the --with-pic flag, and support the `pic-only' and `no-pic' - # LT_INIT options. --# MODE is either 'yes' or 'no'. If omitted, it defaults to 'both'. -+# MODE is either `yes' or `no'. If omitted, it defaults to `both'. - m4_define([_LT_WITH_PIC], - [AC_ARG_WITH([pic], - [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@], -@@ -389,17 +334,19 @@ - *) - pic_mode=default - # Look at the argument we got. We use all the common list separators. 
-- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for lt_pkg in $withval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$lt_pkg" = "X$lt_p"; then - pic_mode=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], -- [pic_mode=m4_default([$1], [default])]) -+ [pic_mode=default]) -+ -+test -z "$pic_mode" && pic_mode=m4_default([$1], [default]) - - _LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl - ])# _LT_WITH_PIC -@@ -412,7 +359,7 @@ - [_LT_SET_OPTION([LT_INIT], [pic-only]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you --put the 'pic-only' option into LT_INIT's first parameter.]) -+put the `pic-only' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltsugar.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltsugar.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltsugar.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltsugar.m4 2020-07-16 10:48:35.380937000 +0200 -@@ -1,7 +1,6 @@ - # ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*- - # --# Copyright (C) 2004-2005, 2007-2008, 2011-2015 Free Software --# Foundation, Inc. -+# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc. - # Written by Gary V. Vaughan, 2004 - # - # This file is free software; the Free Software Foundation gives -@@ -34,7 +33,7 @@ - # ------------ - # Manipulate m4 lists. - # These macros are necessary as long as will still need to support --# Autoconf-2.59, which quotes differently. -+# Autoconf-2.59 which quotes differently. - m4_define([lt_car], [[$1]]) - m4_define([lt_cdr], - [m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])], -@@ -45,7 +44,7 @@ - - # lt_append(MACRO-NAME, STRING, [SEPARATOR]) - # ------------------------------------------ --# Redefine MACRO-NAME to hold its former content plus 'SEPARATOR''STRING'. -+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'. - # Note that neither SEPARATOR nor STRING are expanded; they are appended - # to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked). - # No SEPARATOR is output if MACRO-NAME was previously undefined (different -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltversion.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltversion.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/ltversion.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/ltversion.m4 2020-07-16 10:48:35.381648000 +0200 -@@ -1,6 +1,6 @@ - # ltversion.m4 -- version numbers -*- Autoconf -*- - # --# Copyright (C) 2004, 2011-2015 Free Software Foundation, Inc. -+# Copyright (C) 2004 Free Software Foundation, Inc. 
- # Written by Scott James Remnant, 2004 - # - # This file is free software; the Free Software Foundation gives -@@ -9,15 +9,15 @@ - - # @configure_input@ - --# serial 4179 ltversion.m4 -+# serial 3337 ltversion.m4 - # This file is part of GNU Libtool - --m4_define([LT_PACKAGE_VERSION], [2.4.6]) --m4_define([LT_PACKAGE_REVISION], [2.4.6]) -+m4_define([LT_PACKAGE_VERSION], [2.4.2]) -+m4_define([LT_PACKAGE_REVISION], [1.3337]) - - AC_DEFUN([LTVERSION_VERSION], --[macro_version='2.4.6' --macro_revision='2.4.6' -+[macro_version='2.4.2' -+macro_revision='1.3337' - _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) - _LT_DECL(, macro_revision, 0) - ]) -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/missing psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/missing ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/confdb/missing 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/confdb/missing 2020-07-16 10:48:35.383853000 +0200 -@@ -160,7 +160,7 @@ - ;; - autom4te*) - echo "You might have modified some maintainer files that require" -- echo "the 'autom4te' program to be rebuilt." -+ echo "the 'automa4te' program to be rebuilt." - program_details 'autom4te' - ;; - bison*|yacc*) -@@ -210,6 +210,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/configure psmpi-5.4.6-1/mpich2/src/mpi/romio/configure ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/configure 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/configure 2020-07-16 10:48:35.417269955 +0200 -@@ -737,6 +737,7 @@ - BUILD_AD_NFS_FALSE - BUILD_AD_NFS_TRUE - PVFS2_CONFIG -+IME_INSTALL_PATH - BUILD_F77_TESTS_FALSE - BUILD_F77_TESTS_TRUE - BUILD_F77_BINDINGS_FALSE -@@ -770,7 +771,6 @@ - FROM_LAM - FROM_MPICH - MPI_H_INCLUDE --LT_SYS_LIBRARY_PATH - OTOOL64 - OTOOL - LIPO -@@ -898,7 +898,6 @@ - enable_static - with_pic - enable_fast_install --with_aix_soname - with_gnu_ld - with_sysroot - enable_libtool_lock -@@ -912,6 +911,7 @@ - with_file_system - with_pvfs2 - with_mpi_impl -+with_ime - with_mpi - with_aio_lite - enable_largefile -@@ -926,7 +926,6 @@ - LIBS - CPPFLAGS - CPP --LT_SYS_LIBRARY_PATH - FROM_MPICH - FROM_LAM - FROM_OMPI -@@ -1588,12 +1587,9 @@ - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) - --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use - both] -- --with-aix-soname=aix|svr4|both -- shared library versioning (aka "SONAME") variant to -- provide on AIX, [default=aix]. - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -- --with-sysroot[=DIR] Search for dependent libraries within DIR (or the -- compiler's sysroot if not specified). -+ --with-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). - --with-mpl-prefix[=DIR] use the MPL library installed in DIR. Pass - "embedded" to force usage of the MPL source - distributed with Hydra. -@@ -1604,6 +1600,8 @@ - - --with-mpi-impl=name - Specify MPI implementation to build ROMIO for - -+--with-ime=PATH - Path to installation of IME -+ - --with-mpi=path - Path to instalation of MPI (headers, libs, etc) - --with-aio-lite use alternate external aio implementation - -@@ -1616,8 +1614,6 @@ - CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I<include dir> if - you have headers in a nonstandard directory <include dir> - CPP C preprocessor -- LT_SYS_LIBRARY_PATH -- User-defined run-time library search path. - FROM_MPICH set to "yes" if building ROMIO inside of MPICH - FROM_LAM set to "yes" if building ROMIO inside of LAM - FROM_OMPI set to "yes" if building ROMIO inside of Open MPI -@@ -3129,8 +3125,8 @@ - ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' - program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` - --# Expand $ac_aux_dir to an absolute path. --am_aux_dir=`cd "$ac_aux_dir" && pwd` -+# expand $ac_aux_dir to an absolute path -+am_aux_dir=`cd $ac_aux_dir && pwd` - - if test x"${MISSING+set}" != xset; then - case $am_aux_dir in -@@ -3149,7 +3145,7 @@ - $as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} - fi - --if test x"${install_sh+set}" != xset; then -+if test x"${install_sh}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; -@@ -3477,8 +3473,8 @@ - # <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html> - mkdir_p='$(MKDIR_P)' - --# We need awk for the "check" target (and possibly the TAP driver). The --# system "awk" is bad on some platforms. -+# We need awk for the "check" target. The system "awk" is bad on -+# some platforms. - # Always define AMTAR for backward compatibility. Yes, it's still used - # in the wild :-( We should find a proper way to deprecate it ... - AMTAR='$${TAR-tar}' -@@ -4229,65 +4225,6 @@ - ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' - ac_compiler_gnu=$ac_cv_c_compiler_gnu - --ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 --$as_echo_n "checking whether $CC understands -c and -o together... " >&6; } --if ${am_cv_prog_cc_c_o+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext --/* end confdefs.h. */ -- --int --main () --{ -- -- ; -- return 0; --} --_ACEOF -- # Make sure it works both with $CC and with simple cc. -- # Following AC_PROG_CC_C_O, we do the test twice because some -- # compilers refuse to overwrite an existing .o file with -o, -- # though they will create one. -- am_cv_prog_cc_c_o=yes -- for am_i in 1 2; do -- if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 -- ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 -- ac_status=$? -- echo "$as_me:$LINENO: \$? = $ac_status" >&5 -- (exit $ac_status); } \ -- && test -f conftest2.$ac_objext; then -- : OK -- else -- am_cv_prog_cc_c_o=no -- break -- fi -- done -- rm -f core conftest* -- unset am_i --fi --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 --$as_echo "$am_cv_prog_cc_c_o" >&6; } --if test "$am_cv_prog_cc_c_o" != yes; then -- # Losing compiler, so override with the script. -- # FIXME: It is wrong to rewrite CC. -- # But if we don't then we get into trouble of one sort or another. 
-- # A longer-term fix would be to have automake use am__CC in this case, -- # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" -- CC="$am_aux_dir/compile $CC" --fi --ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu -- -- - depcc="$CC" am_compiler_list= - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 -@@ -4422,6 +4359,131 @@ - eval pac_save_CFLAGS_${pac_save_CFLAGS_nesting}="" - - -+if test "x$CC" != xcc; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5 -+$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5 -+$as_echo_n "checking whether cc understands -c and -o together... " >&6; } -+fi -+set dummy $CC; ac_cc=`$as_echo "$2" | -+ sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -+if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ -+ -+ ; -+ return 0; -+} -+_ACEOF -+# Make sure it works both with $CC and with simple cc. -+# We do the test twice because some compilers refuse to overwrite an -+# existing .o file with -o, though they will create one. -+ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -+rm -f conftest2.* -+if { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } && -+ test -f conftest2.$ac_objext && { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; }; -+then -+ eval ac_cv_prog_cc_${ac_cc}_c_o=yes -+ if test "x$CC" != xcc; then -+ # Test first that cc exists at all. -+ if { ac_try='cc -c conftest.$ac_ext >&5' -+ { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; }; }; then -+ ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -+ rm -f conftest2.* -+ if { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } && -+ test -f conftest2.$ac_objext && { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? 
-+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; }; -+ then -+ # cc works too. -+ : -+ else -+ # cc exists but doesn't like -o. -+ eval ac_cv_prog_cc_${ac_cc}_c_o=no -+ fi -+ fi -+ fi -+else -+ eval ac_cv_prog_cc_${ac_cc}_c_o=no -+fi -+rm -f core conftest* -+ -+fi -+if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -+$as_echo "yes" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+ -+$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h -+ -+fi -+ -+# FIXME: we rely on the cache variable name because -+# there is no other way. -+set dummy $CC -+am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -+eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -+if test "$am_t" != yes; then -+ # Losing compiler, so override with the script. -+ # FIXME: It is wrong to rewrite CC. -+ # But if we don't then we get into trouble of one sort or another. -+ # A longer-term fix would be to have automake use am__CC in this case, -+ # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" -+ CC="$am_aux_dir/compile $CC" -+fi -+ - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 - $as_echo_n "checking for grep that handles long lines and -e... " >&6; } -@@ -5087,13 +5149,7 @@ - if ${am_cv_ar_interface+:} false; then : - $as_echo_n "(cached) " >&6 - else -- ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu -- -- am_cv_ar_interface=ar -+ am_cv_ar_interface=ar - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - int some_variable = 0; -@@ -5124,11 +5180,6 @@ - - fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -- ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_ar_interface" >&5 -@@ -5160,8 +5211,8 @@ - - - --macro_version='2.4.6' --macro_revision='2.4.6' -+macro_version='2.4.2' -+macro_revision='1.3337' - - - -@@ -5175,7 +5226,7 @@ - - - --ltmain=$ac_aux_dir/ltmain.sh -+ltmain="$ac_aux_dir/ltmain.sh" - - # Make sure we can run config.sub. - $SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || -@@ -5295,7 +5346,7 @@ - $ECHO "" - } - --case $ECHO in -+case "$ECHO" in - printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 - $as_echo "printf" >&6; } ;; - print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 -@@ -5488,19 +5539,19 @@ - - # Check whether --with-gnu-ld was given. - if test "${with_gnu_ld+set}" = set; then : -- withval=$with_gnu_ld; test no = "$withval" || with_gnu_ld=yes -+ withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes - else - with_gnu_ld=no - fi - - ac_prog=ld --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - # Check if gcc -print-prog-name=ld gives a path. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 - $as_echo_n "checking for ld used by $CC... 
" >&6; } - case $host in - *-*-mingw*) -- # gcc leaves a trailing carriage return, which upsets mingw -+ # gcc leaves a trailing carriage return which upsets mingw - ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; - *) - ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; -@@ -5514,7 +5565,7 @@ - while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do - ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` - done -- test -z "$LD" && LD=$ac_prog -+ test -z "$LD" && LD="$ac_prog" - ;; - "") - # If it fails, then pretend we aren't using GCC. -@@ -5525,7 +5576,7 @@ - with_gnu_ld=unknown - ;; - esac --elif test yes = "$with_gnu_ld"; then -+elif test "$with_gnu_ld" = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 - $as_echo_n "checking for GNU ld... " >&6; } - else -@@ -5536,32 +5587,32 @@ - $as_echo_n "(cached) " >&6 - else - if test -z "$LD"; then -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then -- lt_cv_path_LD=$ac_dir/$ac_prog -+ lt_cv_path_LD="$ac_dir/$ac_prog" - # Check to see if the program is GNU ld. I'd rather use --version, - # but apparently some variants of GNU ld only accept -v. - # Break only if it was the GNU/non-GNU ld that we prefer. - case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in - *GNU* | *'with BFD'*) -- test no != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != no && break - ;; - *) -- test yes != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != yes && break - ;; - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - else -- lt_cv_path_LD=$LD # Let the user override the test with a path. -+ lt_cv_path_LD="$LD" # Let the user override the test with a path. - fi - fi - --LD=$lt_cv_path_LD -+LD="$lt_cv_path_LD" - if test -n "$LD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 - $as_echo "$LD" >&6; } -@@ -5604,38 +5655,33 @@ - else - if test -n "$NM"; then - # Let the user override the test. -- lt_cv_path_NM=$NM -+ lt_cv_path_NM="$NM" - else -- lt_nm_to_check=${ac_tool_prefix}nm -+ lt_nm_to_check="${ac_tool_prefix}nm" - if test -n "$ac_tool_prefix" && test "$build" = "$host"; then - lt_nm_to_check="$lt_nm_to_check nm" - fi - for lt_tmp_nm in $lt_nm_to_check; do -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- tmp_nm=$ac_dir/$lt_tmp_nm -- if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then -+ tmp_nm="$ac_dir/$lt_tmp_nm" -+ if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then - # Check to see if the nm accepts a BSD-compat flag. 
-- # Adding the 'sed 1q' prevents false positives on HP-UX, which says: -+ # Adding the `sed 1q' prevents false positives on HP-UX, which says: - # nm: unknown option "B" ignored - # Tru64's nm complains that /dev/null is an invalid object file -- # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty -- case $build_os in -- mingw*) lt_bad_file=conftest.nm/nofile ;; -- *) lt_bad_file=/dev/null ;; -- esac -- case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in -- *$lt_bad_file* | *'Invalid file or object type'*) -+ case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in -+ */dev/null* | *'Invalid file or object type'*) - lt_cv_path_NM="$tmp_nm -B" -- break 2 -+ break - ;; - *) - case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in - */dev/null*) - lt_cv_path_NM="$tmp_nm -p" -- break 2 -+ break - ;; - *) - lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but -@@ -5646,15 +5692,15 @@ - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - done - : ${lt_cv_path_NM=no} - fi - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 - $as_echo "$lt_cv_path_NM" >&6; } --if test no != "$lt_cv_path_NM"; then -- NM=$lt_cv_path_NM -+if test "$lt_cv_path_NM" != "no"; then -+ NM="$lt_cv_path_NM" - else - # Didn't find any BSD compatible name lister, look for dumpbin. - if test -n "$DUMPBIN"; then : -@@ -5760,9 +5806,9 @@ - fi - fi - -- case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in -+ case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in - *COFF*) -- DUMPBIN="$DUMPBIN -symbols -headers" -+ DUMPBIN="$DUMPBIN -symbols" - ;; - *) - DUMPBIN=: -@@ -5770,8 +5816,8 @@ - esac - fi - -- if test : != "$DUMPBIN"; then -- NM=$DUMPBIN -+ if test "$DUMPBIN" != ":"; then -+ NM="$DUMPBIN" - fi - fi - test -z "$NM" && NM=nm -@@ -5822,7 +5868,7 @@ - $as_echo_n "(cached) " >&6 - else - i=0 -- teststring=ABCD -+ teststring="ABCD" - - case $build_os in - msdosdjgpp*) -@@ -5862,7 +5908,7 @@ - lt_cv_sys_max_cmd_len=8192; - ;; - -- bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) -+ netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) - # This has been around since 386BSD, at least. Likely further. - if test -x /sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` -@@ -5912,23 +5958,22 @@ - ;; - *) - lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` -- if test -n "$lt_cv_sys_max_cmd_len" && \ -- test undefined != "$lt_cv_sys_max_cmd_len"; then -+ if test -n "$lt_cv_sys_max_cmd_len"; then - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - else - # Make teststring a little bigger before we do anything with it. - # a 1K string should be a reasonable start. -- for i in 1 2 3 4 5 6 7 8; do -+ for i in 1 2 3 4 5 6 7 8 ; do - teststring=$teststring$teststring - done - SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} - # If test is not a shell built-in, we'll probably end up computing a - # maximum length that is only half of the actual maximum length, but - # we can't tell. 
-- while { test X`env echo "$teststring$teststring" 2>/dev/null` \ -+ while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ - = "X$teststring$teststring"; } >/dev/null 2>&1 && -- test 17 != "$i" # 1/2 MB should be enough -+ test $i != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - teststring=$teststring$teststring -@@ -5946,7 +5991,7 @@ - - fi - --if test -n "$lt_cv_sys_max_cmd_len"; then -+if test -n $lt_cv_sys_max_cmd_len ; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 - $as_echo "$lt_cv_sys_max_cmd_len" >&6; } - else -@@ -5964,6 +6009,30 @@ - : ${MV="mv -f"} - : ${RM="rm -f"} - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 -+$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } -+# Try some XSI features -+xsi_shell=no -+( _lt_dummy="a/b/c" -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ -+ && eval 'test $(( 1 + 1 )) -eq 2 \ -+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ -+ && xsi_shell=yes -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 -+$as_echo "$xsi_shell" >&6; } -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 -+$as_echo_n "checking whether the shell understands \"+=\"... " >&6; } -+lt_shell_append=no -+( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ -+ >/dev/null 2>&1 \ -+ && lt_shell_append=yes -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 -+$as_echo "$lt_shell_append" >&6; } -+ -+ - if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - lt_unset=unset - else -@@ -6086,13 +6155,13 @@ - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - reload_cmds=false - fi - ;; - darwin*) -- if test yes = "$GCC"; then -- reload_cmds='$LTCC $LTCFLAGS -nostdlib $wl-r -o $output$reload_objs' -+ if test "$GCC" = yes; then -+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' - else - reload_cmds='$LD$reload_flag -o $output$reload_objs' - fi -@@ -6220,13 +6289,13 @@ - # Need to set the preceding variable on all platforms that support - # interlibrary dependencies. - # 'none' -- dependencies not supported. --# 'unknown' -- same as none, but documents that we really don't know. -+# `unknown' -- same as none, but documents that we really don't know. - # 'pass_all' -- all dependencies passed with no checks. - # 'test_compile' -- check by making test program. - # 'file_magic [[regex]]' -- check by looking for files in library path --# that responds to the $file_magic_cmd with a given extended regex. --# If you have 'file' or equivalent on your system and you're not sure --# whether 'pass_all' will *always* work, you probably want this one. -+# which responds to the $file_magic_cmd with a given extended regex. -+# If you have `file' or equivalent on your system and you're not sure -+# whether `pass_all' will *always* work, you probably want this one. - - case $host_os in - aix[4-9]*) -@@ -6253,7 +6322,8 @@ - # Base MSYS/MinGW do not provide the 'file' command needed by - # func_win32_libid shell function, so use a weaker test based on 'objdump', - # unless we find 'file', for example because we are cross-compiling. -- if ( file / ) >/dev/null 2>&1; then -+ # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. 
-+ if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -@@ -6289,6 +6359,10 @@ - fi - ;; - -+gnu*) -+ lt_cv_deplibs_check_method=pass_all -+ ;; -+ - haiku*) - lt_cv_deplibs_check_method=pass_all - ;; -@@ -6327,7 +6401,7 @@ - ;; - - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -@@ -6349,8 +6423,8 @@ - lt_cv_deplibs_check_method=pass_all - ;; - --openbsd* | bitrig*) -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+openbsd*) -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' -@@ -6403,9 +6477,6 @@ - tpf*) - lt_cv_deplibs_check_method=pass_all - ;; --os2*) -- lt_cv_deplibs_check_method=pass_all -- ;; - esac - - fi -@@ -6563,8 +6634,8 @@ - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- # two different shell functions defined in ltmain.sh; -- # decide which one to use based on capabilities of $DLLTOOL -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL - case `$DLLTOOL --help 2>&1` in - *--identify-strict*) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -@@ -6576,7 +6647,7 @@ - ;; - *) - # fallback: assume linklib IS sharedlib -- lt_cv_sharedlib_from_linklib_cmd=$ECHO -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" - ;; - esac - -@@ -6730,7 +6801,7 @@ - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -- if test 0 -eq "$ac_status"; then -+ if test "$ac_status" -eq 0; then - # Ensure the archiver fails upon bogus file names. - rm -f conftest.$ac_objext libconftest.a - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -@@ -6738,7 +6809,7 @@ - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -- if test 0 -ne "$ac_status"; then -+ if test "$ac_status" -ne 0; then - lt_cv_ar_at_file=@ - fi - fi -@@ -6751,7 +6822,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 - $as_echo "$lt_cv_ar_at_file" >&6; } - --if test no = "$lt_cv_ar_at_file"; then -+if test "x$lt_cv_ar_at_file" = xno; then - archiver_list_spec= - else - archiver_list_spec=$lt_cv_ar_at_file -@@ -6968,7 +7039,7 @@ - - if test -n "$RANLIB"; then - case $host_os in -- bitrig* | openbsd*) -+ openbsd*) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" - ;; - *) -@@ -7058,7 +7129,7 @@ - symcode='[ABCDGISTW]' - ;; - hpux*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - symcode='[ABCDEGRST]' - fi - ;; -@@ -7091,44 +7162,14 @@ - symcode='[ABCDGIRSTW]' ;; - esac - --if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Gets list of data symbols to import. -- lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" -- # Adjust the below global symbol transforms to fixup imported variables. 
-- lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" -- lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" -- lt_c_name_lib_hook="\ -- -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -- -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" --else -- # Disable hooks by default. -- lt_cv_sys_global_symbol_to_import= -- lt_cdecl_hook= -- lt_c_name_hook= -- lt_c_name_lib_hook= --fi -- - # Transform an extracted symbol line into a proper C declaration. - # Some systems (esp. on ia64) link data and code symbols differently, - # so use this general approach. --lt_cv_sys_global_symbol_to_cdecl="sed -n"\ --$lt_cdecl_hook\ --" -e 's/^T .* \(.*\)$/extern int \1();/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" -+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ --$lt_c_name_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" -- --# Transform an extracted symbol line into symbol name with lib prefix and --# symbol address. --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ --$lt_c_name_lib_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -7146,24 +7187,21 @@ - - # Write the raw and C identifiers. - if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Fake it for dumpbin and say T for any non-static function, -- # D for any global variable and I for any imported variable. -+ # Fake it for dumpbin and say T for any non-static function -+ # and D for any global variable. - # Also find C++ and __fastcall symbols from MSVC++, - # which start with @ or ?. - lt_cv_sys_global_symbol_pipe="$AWK '"\ - " {last_section=section; section=\$ 3};"\ - " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ - " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ --" /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ --" /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ --" /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ - " \$ 0!~/External *\|/{next};"\ - " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ - " {if(hide[section]) next};"\ --" {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ --" {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ --" s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ --" s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ -+" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? 
\"T \" : \"D \"};"\ -+" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ -+" s[1]~/^[@?]/{print s[1], s[1]; next};"\ -+" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ - " ' prfx=^$ac_symprfx" - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" -@@ -7211,11 +7249,11 @@ - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext - /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ --#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE --/* DATA imports from DLLs on WIN32 can't be const, because runtime -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. */ - # define LT_DLSYM_CONST --#elif defined __osf__ -+#elif defined(__osf__) - /* This system does not cope well with relocations in const data. */ - # define LT_DLSYM_CONST - #else -@@ -7241,7 +7279,7 @@ - { - { "@PROGRAM@", (void *) 0 }, - _LT_EOF -- $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext -+ $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext - cat <<\_LT_EOF >> conftest.$ac_ext - {0, (void *) 0} - }; -@@ -7261,13 +7299,13 @@ - mv conftest.$ac_objext conftstm.$ac_objext - lt_globsym_save_LIBS=$LIBS - lt_globsym_save_CFLAGS=$CFLAGS -- LIBS=conftstm.$ac_objext -+ LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -- test $ac_status = 0; } && test -s conftest$ac_exeext; then -+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi - LIBS=$lt_globsym_save_LIBS -@@ -7288,7 +7326,7 @@ - rm -rf conftest* conftst* - - # Do not use the global_symbol_pipe unless it works. -- if test yes = "$pipe_works"; then -+ if test "$pipe_works" = yes; then - break - else - lt_cv_sys_global_symbol_pipe= -@@ -7341,16 +7379,6 @@ - - - -- -- -- -- -- -- -- -- -- -- - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 - $as_echo_n "checking for sysroot... " >&6; } - -@@ -7363,9 +7391,9 @@ - - - lt_sysroot= --case $with_sysroot in #( -+case ${with_sysroot} in #( - yes) -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_sysroot=`$CC --print-sysroot 2>/dev/null` - fi - ;; #( -@@ -7375,8 +7403,8 @@ - no|'') - ;; #( - *) -- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_sysroot" >&5 --$as_echo "$with_sysroot" >&6; } -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5 -+$as_echo "${with_sysroot}" >&6; } - as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5 - ;; - esac -@@ -7388,99 +7416,18 @@ - - - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a working dd" >&5 --$as_echo_n "checking for a working dd... 
" >&6; } --if ${ac_cv_path_lt_DD+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --: ${lt_DD:=$DD} --if test -z "$lt_DD"; then -- ac_path_lt_DD_found=false -- # Loop through the user's path and test for each of PROGNAME-LIST -- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR --for as_dir in $PATH --do -- IFS=$as_save_IFS -- test -z "$as_dir" && as_dir=. -- for ac_prog in dd; do -- for ac_exec_ext in '' $ac_executable_extensions; do -- ac_path_lt_DD="$as_dir/$ac_prog$ac_exec_ext" -- as_fn_executable_p "$ac_path_lt_DD" || continue --if "$ac_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: --fi -- $ac_path_lt_DD_found && break 3 -- done -- done -- done --IFS=$as_save_IFS -- if test -z "$ac_cv_path_lt_DD"; then -- : -- fi --else -- ac_cv_path_lt_DD=$lt_DD --fi -- --rm -f conftest.i conftest2.i conftest.out --fi --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_lt_DD" >&5 --$as_echo "$ac_cv_path_lt_DD" >&6; } -- -- --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to truncate binary pipes" >&5 --$as_echo_n "checking how to truncate binary pipes... " >&6; } --if ${lt_cv_truncate_bin+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --lt_cv_truncate_bin= --if "$ac_cv_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" --fi --rm -f conftest.i conftest2.i conftest.out --test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q" --fi --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_truncate_bin" >&5 --$as_echo "$lt_cv_truncate_bin" >&6; } -- -- -- -- -- -- -- --# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. --func_cc_basename () --{ -- for cc_temp in $*""; do -- case $cc_temp in -- compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -- distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -- \-*) ;; -- *) break;; -- esac -- done -- func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` --} -- - # Check whether --enable-libtool-lock was given. - if test "${enable_libtool_lock+set}" = set; then : - enableval=$enable_libtool_lock; - fi - --test no = "$enable_libtool_lock" || enable_libtool_lock=yes -+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - - # Some flags need to be propagated to the compiler or linker for good - # libtool support. - case $host in - ia64-*-hpux*) -- # Find out what ABI is being produced by ac_compile, and set mode -- # options accordingly. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 -@@ -7489,25 +7436,24 @@ - test $ac_status = 0; }; then - case `/usr/bin/file conftest.$ac_objext` in - *ELF-32*) -- HPUX_IA64_MODE=32 -+ HPUX_IA64_MODE="32" - ;; - *ELF-64*) -- HPUX_IA64_MODE=64 -+ HPUX_IA64_MODE="64" - ;; - esac - fi - rm -rf conftest* - ;; - *-*-irix6*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -+ # Find out which ABI we are using. 
- echo '#line '$LINENO' "configure"' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -melf32bsmip" -@@ -7536,50 +7482,9 @@ - rm -rf conftest* - ;; - --mips64*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -- echo '#line '$LINENO' "configure"' > conftest.$ac_ext -- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 -- (eval $ac_compile) 2>&5 -- ac_status=$? -- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -- test $ac_status = 0; }; then -- emul=elf -- case `/usr/bin/file conftest.$ac_objext` in -- *32-bit*) -- emul="${emul}32" -- ;; -- *64-bit*) -- emul="${emul}64" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *MSB*) -- emul="${emul}btsmip" -- ;; -- *LSB*) -- emul="${emul}ltsmip" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *N32*) -- emul="${emul}n32" -- ;; -- esac -- LD="${LD-ld} -m $emul" -- fi -- rm -rf conftest* -- ;; -- --x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ -+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ - s390*-*linux*|s390*-*tpf*|sparc*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. Note that the listed cases only cover the -- # situations where additional linker options are needed (such as when -- # doing 32-bit compilation for a host where ld defaults to 64-bit, or -- # vice versa); the common cases where no linker options are needed do -- # not appear in the list. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 -@@ -7593,19 +7498,9 @@ - LD="${LD-ld} -m elf_i386_fbsd" - ;; - x86_64-*linux*) -- case `/usr/bin/file conftest.o` in -- *x86-64*) -- LD="${LD-ld} -m elf32_x86_64" -- ;; -- *) -- LD="${LD-ld} -m elf_i386" -- ;; -- esac -+ LD="${LD-ld} -m elf_i386" - ;; -- powerpc64le-*linux*) -- LD="${LD-ld} -m elf32lppclinux" -- ;; -- powerpc64-*linux*) -+ ppc64-*linux*|powerpc64-*linux*) - LD="${LD-ld} -m elf32ppclinux" - ;; - s390x-*linux*) -@@ -7624,10 +7519,7 @@ - x86_64-*linux*) - LD="${LD-ld} -m elf_x86_64" - ;; -- powerpcle-*linux*) -- LD="${LD-ld} -m elf64lppc" -- ;; -- powerpc-*linux*) -+ ppc*-*linux*|powerpc*-*linux*) - LD="${LD-ld} -m elf64ppc" - ;; - s390*-*linux*|s390*-*tpf*) -@@ -7645,7 +7537,7 @@ - - *-*-sco3.2v5*) - # On SCO OpenServer 5, we need -belf to get full-featured binaries. -- SAVE_CFLAGS=$CFLAGS -+ SAVE_CFLAGS="$CFLAGS" - CFLAGS="$CFLAGS -belf" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 - $as_echo_n "checking whether the C compiler needs -belf... 
" >&6; } -@@ -7685,14 +7577,13 @@ - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 - $as_echo "$lt_cv_cc_needs_belf" >&6; } -- if test yes != "$lt_cv_cc_needs_belf"; then -+ if test x"$lt_cv_cc_needs_belf" != x"yes"; then - # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf -- CFLAGS=$SAVE_CFLAGS -+ CFLAGS="$SAVE_CFLAGS" - fi - ;; - *-*solaris*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 -@@ -7704,7 +7595,7 @@ - case $lt_cv_prog_gnu_ld in - yes*) - case $host in -- i?86-*-solaris*|x86_64-*-solaris*) -+ i?86-*-solaris*) - LD="${LD-ld} -m elf_x86_64" - ;; - sparc*-*-solaris*) -@@ -7713,7 +7604,7 @@ - esac - # GNU ld 2.21 introduced _sol2 emulations. Use them if available. - if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then -- LD=${LD-ld}_sol2 -+ LD="${LD-ld}_sol2" - fi - ;; - *) -@@ -7729,7 +7620,7 @@ - ;; - esac - --need_locks=$enable_libtool_lock -+need_locks="$enable_libtool_lock" - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -@@ -7840,7 +7731,7 @@ - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 - $as_echo "$lt_cv_path_mainfest_tool" >&6; } --if test yes != "$lt_cv_path_mainfest_tool"; then -+if test "x$lt_cv_path_mainfest_tool" != xyes; then - MANIFEST_TOOL=: - fi - -@@ -8343,7 +8234,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_apple_cc_single_mod=no -- if test -z "$LT_MULTI_MODULE"; then -+ if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the -@@ -8361,7 +8252,7 @@ - cat conftest.err >&5 - # Otherwise, if the output was created with a 0 exit code from - # the compiler, it worked. -- elif test -f libconftest.dylib && test 0 = "$_lt_result"; then -+ elif test -f libconftest.dylib && test $_lt_result -eq 0; then - lt_cv_apple_cc_single_mod=yes - else - cat conftest.err >&5 -@@ -8400,7 +8291,7 @@ - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 -@@ -8429,7 +8320,7 @@ - _lt_result=$? - if test -s conftest.err && $GREP force_load conftest.err; then - cat conftest.err >&5 -- elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then -+ elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then - lt_cv_ld_force_load=yes - else - cat conftest.err >&5 -@@ -8442,32 +8333,32 @@ - $as_echo "$lt_cv_ld_force_load" >&6; } - case $host_os in - rhapsody* | darwin1.[012]) -- _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) # darwin 5.x on - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? 
- case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[91]*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -- 10.[012][,.]*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; -+ 10.[012]*) -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac -- if test yes = "$lt_cv_apple_cc_single_mod"; then -+ if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi -- if test yes = "$lt_cv_ld_exported_symbols_list"; then -- _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' -+ if test "$lt_cv_ld_exported_symbols_list" = "yes"; then -+ _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else -- _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' -+ _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' - fi -- if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then -+ if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then - _lt_dsymutil='~$DSYMUTIL $lib || :' - else - _lt_dsymutil= -@@ -8475,41 +8366,6 @@ - ;; - esac - --# func_munge_path_list VARIABLE PATH --# ----------------------------------- --# VARIABLE is name of variable containing _space_ separated list of --# directories to be munged by the contents of PATH, which is string --# having a format: --# "DIR[:DIR]:" --# string "DIR[ DIR]" will be prepended to VARIABLE --# ":DIR[:DIR]" --# string "DIR[ DIR]" will be appended to VARIABLE --# "DIRP[:DIRP]::[DIRA:]DIRA" --# string "DIRP[ DIRP]" will be prepended to VARIABLE and string --# "DIRA[ DIRA]" will be appended to VARIABLE --# "DIR[:DIR]" --# VARIABLE will be replaced by "DIR[ DIR]" --func_munge_path_list () --{ -- case x$2 in -- x) -- ;; -- *:) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" -- ;; -- x:*) -- eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- *::*) -- eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" -- eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" -- ;; -- *) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- esac --} -- - for ac_header in dlfcn.h - do : - ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default -@@ -8546,14 +8402,14 @@ - *) - enable_shared=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_shared=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -8577,14 +8433,14 @@ - *) - enable_static=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_static=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -8608,14 +8464,14 @@ - *) - pic_mode=default - # Look at the argument we got. We use all the common list separators. 
-- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for lt_pkg in $withval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$lt_pkg" = "X$lt_p"; then - pic_mode=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -8623,6 +8479,8 @@ - fi - - -+test -z "$pic_mode" && pic_mode=default -+ - - - -@@ -8638,14 +8496,14 @@ - *) - enable_fast_install=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_fast_install=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -8659,63 +8517,11 @@ - - - -- shared_archive_member_spec= --case $host,$enable_shared in --power*-*-aix[5-9]*,yes) -- { $as_echo "$as_me:${as_lineno-$LINENO}: checking which variant of shared library versioning to provide" >&5 --$as_echo_n "checking which variant of shared library versioning to provide... " >&6; } -- --# Check whether --with-aix-soname was given. --if test "${with_aix_soname+set}" = set; then : -- withval=$with_aix_soname; case $withval in -- aix|svr4|both) -- ;; -- *) -- as_fn_error $? "Unknown argument to --with-aix-soname" "$LINENO" 5 -- ;; -- esac -- lt_cv_with_aix_soname=$with_aix_soname --else -- if ${lt_cv_with_aix_soname+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- lt_cv_with_aix_soname=aix --fi -- -- with_aix_soname=$lt_cv_with_aix_soname --fi -- -- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_aix_soname" >&5 --$as_echo "$with_aix_soname" >&6; } -- if test aix != "$with_aix_soname"; then -- # For the AIX way of multilib, we name the shared archive member -- # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o', -- # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File. -- # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag, -- # the AIX toolchain works better with OBJECT_MODE set (default 32). -- if test 64 = "${OBJECT_MODE-32}"; then -- shared_archive_member_spec=shr_64 -- else -- shared_archive_member_spec=shr -- fi -- fi -- ;; --*) -- with_aix_soname=aix -- ;; --esac -- -- -- -- -- -- -- - - - - # This can be used to rebuild libtool when needed --LIBTOOL_DEPS=$ltmain -+LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. - LIBTOOL='$(SHELL) $(top_builddir)/libtool' -@@ -8764,7 +8570,7 @@ - - - --if test -n "${ZSH_VERSION+set}"; then -+if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -@@ -8803,7 +8609,7 @@ - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. -- if test set != "${COLLECT_NAMES+set}"; then -+ if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -8814,14 +8620,14 @@ - ofile=libtool - can_build_shared=yes - --# All known linkers require a '.a' archive for static linking (except MSVC, -+# All known linkers require a `.a' archive for static linking (except MSVC, - # which needs '.lib'). 
- libext=a - --with_gnu_ld=$lt_cv_prog_gnu_ld -+with_gnu_ld="$lt_cv_prog_gnu_ld" - --old_CC=$CC --old_CFLAGS=$CFLAGS -+old_CC="$CC" -+old_CFLAGS="$CFLAGS" - - # Set sane defaults for various variables - test -z "$CC" && CC=cc -@@ -8830,8 +8636,15 @@ - test -z "$LD" && LD=ld - test -z "$ac_objext" && ac_objext=o - --func_cc_basename $compiler --cc_basename=$func_cc_basename_result -+for cc_temp in $compiler""; do -+ case $cc_temp in -+ compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -+ distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -+ \-*) ;; -+ *) break;; -+ esac -+done -+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - - # Only perform the check for file, if the check method requires it -@@ -8846,22 +8659,22 @@ - else - case $MAGIC_CMD in - [\\/*] | ?:[\\/]*) -- lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. -+ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; - *) -- lt_save_MAGIC_CMD=$MAGIC_CMD -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_MAGIC_CMD="$MAGIC_CMD" -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- if test -f "$ac_dir/${ac_tool_prefix}file"; then -- lt_cv_path_MAGIC_CMD=$ac_dir/"${ac_tool_prefix}file" -+ if test -f $ac_dir/${ac_tool_prefix}file; then -+ lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` -- MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : -@@ -8884,13 +8697,13 @@ - break - fi - done -- IFS=$lt_save_ifs -- MAGIC_CMD=$lt_save_MAGIC_CMD -+ IFS="$lt_save_ifs" -+ MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; - esac - fi - --MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 - $as_echo "$MAGIC_CMD" >&6; } -@@ -8912,22 +8725,22 @@ - else - case $MAGIC_CMD in - [\\/*] | ?:[\\/]*) -- lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. -+ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; - *) -- lt_save_MAGIC_CMD=$MAGIC_CMD -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_MAGIC_CMD="$MAGIC_CMD" -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. 
-- if test -f "$ac_dir/file"; then -- lt_cv_path_MAGIC_CMD=$ac_dir/"file" -+ if test -f $ac_dir/file; then -+ lt_cv_path_MAGIC_CMD="$ac_dir/file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` -- MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : -@@ -8950,13 +8763,13 @@ - break - fi - done -- IFS=$lt_save_ifs -- MAGIC_CMD=$lt_save_MAGIC_CMD -+ IFS="$lt_save_ifs" -+ MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; - esac - fi - --MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 - $as_echo "$MAGIC_CMD" >&6; } -@@ -8977,7 +8790,7 @@ - - # Use C for the default configuration in the libtool script - --lt_save_CC=$CC -+lt_save_CC="$CC" - ac_ext=c - ac_cpp='$CPP $CPPFLAGS' - ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -@@ -9035,7 +8848,7 @@ - - lt_prog_compiler_no_builtin_flag= - --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $cc_basename in - nvcc*) - lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; -@@ -9051,7 +8864,7 @@ - lt_cv_prog_compiler_rtti_exceptions=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="-fno-rtti -fno-exceptions" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="-fno-rtti -fno-exceptions" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins -@@ -9081,7 +8894,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 - $as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } - --if test yes = "$lt_cv_prog_compiler_rtti_exceptions"; then -+if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then - lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" - else - : -@@ -9099,18 +8912,17 @@ - lt_prog_compiler_static= - - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_static='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - fi -- lt_prog_compiler_pic='-fPIC' - ;; - - amigaos*) -@@ -9121,8 +8933,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. 
- lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -9138,11 +8950,6 @@ - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static='$wl-static' -- ;; -- esac - ;; - - darwin* | rhapsody*) -@@ -9213,7 +9020,7 @@ - case $host_os in - aix*) - lt_prog_compiler_wl='-Wl,' -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - else -@@ -9221,29 +9028,10 @@ - fi - ;; - -- darwin* | rhapsody*) -- # PIC is the default on this platform -- # Common symbols not allowed in MH_DYLIB files -- lt_prog_compiler_pic='-fno-common' -- case $cc_basename in -- nagfor*) -- # NAG Fortran compiler -- lt_prog_compiler_wl='-Wl,-Wl,,' -- lt_prog_compiler_pic='-PIC' -- lt_prog_compiler_static='-Bstatic' -- ;; -- esac -- ;; -- - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static='$wl-static' -- ;; -- esac - ;; - - hpux9* | hpux10* | hpux11*) -@@ -9259,7 +9047,7 @@ - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? -- lt_prog_compiler_static='$wl-a ${wl}archive' -+ lt_prog_compiler_static='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) -@@ -9268,9 +9056,9 @@ - lt_prog_compiler_static='-non_shared' - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in -- # old Intel for x86_64, which still supported -KPIC. -+ # old Intel for x86_64 which still supported -KPIC. - ecc*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' -@@ -9295,12 +9083,6 @@ - lt_prog_compiler_pic='-PIC' - lt_prog_compiler_static='-Bstatic' - ;; -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- lt_prog_compiler_wl='-Wl,' -- lt_prog_compiler_pic='-fPIC' -- lt_prog_compiler_static='-static' -- ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -9398,7 +9180,7 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - lt_prog_compiler_pic='-Kconform_pic' - lt_prog_compiler_static='-Bstatic' - fi -@@ -9427,7 +9209,7 @@ - fi - - case $host_os in -- # For platforms that do not support PIC, -DPIC is meaningless: -+ # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic= - ;; -@@ -9459,7 +9241,7 @@ - lt_cv_prog_compiler_pic_works=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="$lt_prog_compiler_pic -DPIC" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="$lt_prog_compiler_pic -DPIC" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. 
- # Note that $ac_compile itself does not contain backslashes and begins -@@ -9489,7 +9271,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 - $as_echo "$lt_cv_prog_compiler_pic_works" >&6; } - --if test yes = "$lt_cv_prog_compiler_pic_works"; then -+if test x"$lt_cv_prog_compiler_pic_works" = xyes; then - case $lt_prog_compiler_pic in - "" | " "*) ;; - *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; -@@ -9521,7 +9303,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_prog_compiler_static_works=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -9540,13 +9322,13 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 - $as_echo "$lt_cv_prog_compiler_static_works" >&6; } - --if test yes = "$lt_cv_prog_compiler_static_works"; then -+if test x"$lt_cv_prog_compiler_static_works" = xyes; then - : - else - lt_prog_compiler_static= -@@ -9666,8 +9448,8 @@ - - - --hard_links=nottested --if test no = "$lt_cv_prog_compiler_c_o" && test no != "$need_locks"; then -+hard_links="nottested" -+if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 - $as_echo_n "checking if we can lock with hard links... " >&6; } -@@ -9679,9 +9461,9 @@ - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 - $as_echo "$hard_links" >&6; } -- if test no = "$hard_links"; then -- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 --$as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} -+ if test "$hard_links" = no; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -+$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi - else -@@ -9724,9 +9506,9 @@ - # included in the symbol list - include_expsyms= - # exclude_expsyms can be an extended regexp of symbols to exclude -- # it will be wrapped by ' (' and ')$', so one must not match beginning or -- # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', -- # as well as any symbol that contains 'd'. -+ # it will be wrapped by ` (' and `)$', so one must not match beginning or -+ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', -+ # as well as any symbol that contains `d'. - exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if -@@ -9741,7 +9523,7 @@ - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. 
-- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; -@@ -9749,7 +9531,7 @@ - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; -- openbsd* | bitrig*) -+ openbsd*) - with_gnu_ld=no - ;; - esac -@@ -9759,7 +9541,7 @@ - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility -@@ -9781,24 +9563,24 @@ - esac - fi - -- if test yes = "$lt_use_gnu_ld_interface"; then -+ if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty -- wlarc='$wl' -+ wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -- export_dynamic_flag_spec='$wl--export-dynamic' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' -+ export_dynamic_flag_spec='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then -- whole_archive_flag_spec=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec= - fi - supports_anon_versioning=no -- case `$LD -v | $SED -e 's/(^)\+)\s\+//' 2>&1` in -+ case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... -@@ -9811,7 +9593,7 @@ - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken -- if test ia64 != "$host_cpu"; then -+ if test "$host_cpu" != ia64; then - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -@@ -9830,7 +9612,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) -@@ -9846,7 +9628,7 @@ - allow_undefined_flag=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs=no - fi -@@ -9856,7 +9638,7 @@ - # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, - # as there is no search path for DLLs. 
- hardcode_libdir_flag_spec='-L$libdir' -- export_dynamic_flag_spec='$wl--export-all-symbols' -+ export_dynamic_flag_spec='${wl}--export-all-symbols' - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -@@ -9864,89 +9646,61 @@ - exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... -- archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs=no - fi - ;; - - haiku*) -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs=yes - ;; - -- os2*) -- hardcode_libdir_flag_spec='-L$libdir' -- hardcode_minus_L=yes -- allow_undefined_flag=unsupported -- shrext_cmds=.dll -- archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- 
old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- enable_shared_with_static_runtimes=yes -- ;; -- - interix[3-9]*) - hardcode_direct=no - hardcode_shlibpath_var=no -- hardcode_libdir_flag_spec='$wl-rpath,$libdir' -- export_dynamic_flag_spec='$wl-E' -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- archive_expsym_cmds='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no -- if test linux-dietlibc = "$host_os"; then -+ if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ -- && test no = "$tmp_diet" -+ && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler -- whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers -- whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; -@@ -9957,47 +9711,42 @@ - lf95*) # Lahey Fortran 8.1 - whole_archive_flag_spec= - tmp_sharedflag='--shared' ;; -- nagfor*) # NAGFOR 5.3 -- tmp_sharedflag='-Wl,-shared' ;; - xl[cC]* | 
bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 -- whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 -- whole_archive_flag_spec='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac -- archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in -- tcc*) -- export_dynamic_flag_spec='-rdynamic' -- ;; - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -10011,8 +9760,8 @@ - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $pic_flag 
$libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -10030,8 +9779,8 @@ - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -10043,7 +9792,7 @@ - ld_shlibs=no - cat <<_LT_EOF 1>&2 - --*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot -+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not - *** reliably create shared libraries on SCO systems. Therefore, libtool - *** is disabling shared libraries support. We urge you to upgrade GNU - *** binutils to release 2.16.91.0.3 or newer. Another option is to modify -@@ -10058,9 +9807,9 @@ - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -10077,15 +9826,15 @@ - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - esac - -- if test no = "$ld_shlibs"; then -+ if test "$ld_shlibs" = no; then - runpath_var= - hardcode_libdir_flag_spec= - export_dynamic_flag_spec= -@@ -10101,7 +9850,7 @@ - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. 
- hardcode_minus_L=yes -- if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then -+ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct=unsupported -@@ -10109,57 +9858,34 @@ - ;; - - aix[4-9]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. -- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". -- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global -+ # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- export_symbols_cmds='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. 
- case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do -- if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then -+ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -10178,21 +9904,13 @@ - hardcode_direct_absolute=yes - hardcode_libdir_separator=':' - link_all_deplibs=yes -- file_list_spec='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # traditional, no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. -- hardcode_direct=no -- hardcode_direct_absolute=no -- ;; -- esac -+ file_list_spec='${wl}-f,' - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -10211,42 +9929,35 @@ - ;; - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag="$shared_flag "'$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- export_dynamic_flag_spec='$wl-bexpall' -+ export_dynamic_flag_spec='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
-- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath_+:} false; then : -@@ -10281,7 +9992,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then -- lt_cv_aix_libpath_=/usr/lib:/lib -+ lt_cv_aix_libpath_="/usr/lib:/lib" - fi - - fi -@@ -10289,17 +10000,17 @@ - aix_libpath=$lt_cv_aix_libpath_ - fi - -- hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" -- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" -+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- hardcode_libdir_flag_spec='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag="-z nodefs" -- archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. -- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath_+:} false; then : -@@ -10334,7 +10045,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then -- lt_cv_aix_libpath_=/usr/lib:/lib -+ lt_cv_aix_libpath_="/usr/lib:/lib" - fi - - fi -@@ -10342,33 +10053,21 @@ - aix_libpath=$lt_cv_aix_libpath_ - fi - -- hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" -+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- no_undefined_flag=' $wl-bernotok' -- allow_undefined_flag=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ no_undefined_flag=' ${wl}-bernotok' -+ allow_undefined_flag=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. 
-- whole_archive_flag_spec='$wl--whole-archive$convenience $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec='$convenience' - fi - archive_cmds_need_lc=yes -- archive_expsym_cmds='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared libraries. -- archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- archive_expsym_cmds="$archive_expsym_cmds"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- archive_expsym_cmds="$archive_expsym_cmds"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared libraries. -+ archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -10377,7 +10076,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) -@@ -10407,17 +10106,16 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, )='true' - enable_shared_with_static_runtimes=yes -@@ -10426,18 +10124,18 @@ - # Don't use ranlib - old_postinstall_cmds='chmod 644 $oldlib' - postlink_cmds='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # Assume MSVC wrapper -@@ -10446,7 +10144,7 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. 
-@@ -10465,24 +10163,24 @@ - hardcode_direct=no - hardcode_automatic=yes - hardcode_shlibpath_var=unsupported -- if test yes = "$lt_cv_ld_force_load"; then -- whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' -+ if test "$lt_cv_ld_force_load" = "yes"; then -+ whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - - else - whole_archive_flag_spec='' - fi - link_all_deplibs=yes -- allow_undefined_flag=$_lt_dar_allow_undefined -+ allow_undefined_flag="$_lt_dar_allow_undefined" - case $cc_basename in -- ifort*|nagfor*) _lt_dar_can_shared=yes ;; -+ ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac -- if test yes = "$_lt_dar_can_shared"; then -+ if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all -- archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" -- module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" -- archive_expsym_cmds="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" -- module_expsym_cmds="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" -+ archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" -+ module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" -+ archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" -+ module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs=no -@@ -10524,33 +10222,33 @@ - ;; - - hpux9*) -- if test yes = "$GCC"; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GCC" = yes; then -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else -- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test 
$output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -- hardcode_libdir_flag_spec='$wl+b $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L=yes -- export_dynamic_flag_spec='$wl-E' -+ export_dynamic_flag_spec='${wl}-E' - ;; - - hpux10*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -- archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - hardcode_direct_absolute=yes -- export_dynamic_flag_spec='$wl-E' -+ export_dynamic_flag_spec='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L=yes -@@ -10558,25 +10256,25 @@ - ;; - - hpux11*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) -- archive_cmds='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) -- archive_cmds='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - -@@ -10588,7 +10286,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_prog_compiler__b=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -b" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -10607,14 +10305,14 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 - $as_echo "$lt_cv_prog_compiler__b" >&6; } - --if test yes = "$lt_cv_prog_compiler__b"; then -- archive_cmds='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+if test x"$lt_cv_prog_compiler__b" = xyes; then -+ archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs 
$compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -10622,8 +10320,8 @@ - ;; - esac - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - - case $host_cpu in -@@ -10634,7 +10332,7 @@ - *) - hardcode_direct=yes - hardcode_direct_absolute=yes -- export_dynamic_flag_spec='$wl-E' -+ export_dynamic_flag_spec='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. -@@ -10645,8 +10343,8 @@ - ;; - - irix5* | irix6* | nonstopux*) -- if test yes = "$GCC"; then -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -@@ -10656,8 +10354,8 @@ - if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 - else -- save_LDFLAGS=$LDFLAGS -- LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. 
*/ - int foo (void) { return 0; } -@@ -10669,34 +10367,24 @@ - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 - $as_echo "$lt_cv_irix_exported_symbol" >&6; } -- if test yes = "$lt_cv_irix_exported_symbol"; then -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc='no' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - inherit_rpath=yes - link_all_deplibs=yes - ;; - -- linux*) -- case $cc_basename in -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- ld_shlibs=yes -- archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- ;; -- esac -- ;; -- - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out -@@ -10711,7 +10399,7 @@ - newsos6) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_shlibpath_var=no - ;; -@@ -10719,19 +10407,27 @@ - *nto* | *qnx*) - ;; - -- openbsd* | bitrig*) -+ openbsd*) - if test -f /usr/libexec/ld.so; then - hardcode_direct=yes - hardcode_shlibpath_var=no - hardcode_direct_absolute=yes -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' -- hardcode_libdir_flag_spec='$wl-rpath,$libdir' -- export_dynamic_flag_spec='$wl-E' -+ archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags 
${wl}-retain-symbols-file,$export_symbols' -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec='${wl}-E' - else -- archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- hardcode_libdir_flag_spec='$wl-rpath,$libdir' -+ case $host_os in -+ openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) -+ archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' -+ hardcode_libdir_flag_spec='-R$libdir' -+ ;; -+ *) -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ ;; -+ esac - fi - else - ld_shlibs=no -@@ -10742,53 +10438,33 @@ - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - allow_undefined_flag=unsupported -- shrext_cmds=.dll -- archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- enable_shared_with_static_runtimes=yes -+ archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' -+ old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) -- if test yes = "$GCC"; then -- allow_undefined_flag=' $wl-expect_unresolved $wl\*' -- archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag=' -expect_unresolved \*' -- archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" 
&& func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc='no' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag -- if test yes = "$GCC"; then -- allow_undefined_flag=' $wl-expect_unresolved $wl\*' -- archive_cmds='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ if test "$GCC" = yes; then -+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -- archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' -+ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec='-rpath $libdir' -@@ -10799,24 +10475,24 @@ - - solaris*) - no_undefined_flag=' -z defs' -- if test yes = "$GCC"; then -- wlarc='$wl' -- archive_cmds='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ wlarc='${wl}' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' -- archive_cmds='$LD -G$allow_undefined_flag -h $soname -o 
$lib $libobjs $deplibs $linker_flags' -+ archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' -+ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) -- wlarc='$wl' -- archive_cmds='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ wlarc='${wl}' -+ archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi -@@ -10826,11 +10502,11 @@ - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. GCC discards it without '$wl', -+ # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) -- if test yes = "$GCC"; then -- whole_archive_flag_spec='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ if test "$GCC" = yes; then -+ whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec='-z allextract$convenience -z defaultextract' - fi -@@ -10840,10 +10516,10 @@ - ;; - - sunos4*) -- if test sequent = "$host_vendor"; then -+ if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. 
-- archive_cmds='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -10892,43 +10568,43 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) -- no_undefined_flag='$wl-z,text' -+ no_undefined_flag='${wl}-z,text' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- no_undefined_flag='$wl-z,text' -- allow_undefined_flag='$wl-z,nodefs' -+ no_undefined_flag='${wl}-z,text' -+ allow_undefined_flag='${wl}-z,nodefs' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no -- hardcode_libdir_flag_spec='$wl-R,$libdir' -+ hardcode_libdir_flag_spec='${wl}-R,$libdir' - hardcode_libdir_separator=':' - link_all_deplibs=yes -- export_dynamic_flag_spec='$wl-Bexport' -+ export_dynamic_flag_spec='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - -@@ -10943,10 +10619,10 @@ - ;; - esac - -- if test sni = "$host_vendor"; then -+ if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) -- export_dynamic_flag_spec='$wl-Blargedynsym' -+ export_dynamic_flag_spec='${wl}-Blargedynsym' - ;; - esac - fi -@@ -10954,7 +10630,7 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 - $as_echo "$ld_shlibs" >&6; } --test no = "$ld_shlibs" && can_build_shared=no -+test "$ld_shlibs" = no && can_build_shared=no - - with_gnu_ld=$with_gnu_ld - -@@ -10980,7 +10656,7 @@ - # Assume -lc should be added - archive_cmds_need_lc=yes - -- if test yes,yes = "$GCC,$enable_shared"; then -+ if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. -@@ -11195,14 +10871,14 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 - $as_echo_n "checking dynamic linker characteristics... " >&6; } - --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $host_os in -- darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; -- *) lt_awk_arg='/^libraries:/' ;; -+ darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; -+ *) lt_awk_arg="/^libraries:/" ;; - esac - case $host_os in -- mingw* | cegcc*) lt_sed_strip_eq='s|=\([A-Za-z]:\)|\1|g' ;; -- *) lt_sed_strip_eq='s|=/|/|g' ;; -+ mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;; -+ *) lt_sed_strip_eq="s,=/,/,g" ;; - esac - lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` - case $lt_search_path_spec in -@@ -11218,35 +10894,28 @@ - ;; - esac - # Ok, now we have the path, separated by spaces, we can step through it -- # and add multilib dir if necessary... -+ # and add multilib dir if necessary. - lt_tmp_lt_search_path_spec= -- lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` -- # ...but if some path component already ends with the multilib dir we assume -- # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). 
-- case "$lt_multi_os_dir; $lt_search_path_spec " in -- "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) -- lt_multi_os_dir= -- ;; -- esac -+ lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` - for lt_sys_path in $lt_search_path_spec; do -- if test -d "$lt_sys_path$lt_multi_os_dir"; then -- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" -- elif test -n "$lt_multi_os_dir"; then -+ if test -d "$lt_sys_path/$lt_multi_os_dir"; then -+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" -+ else - test -d "$lt_sys_path" && \ - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" - fi - done - lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' --BEGIN {RS = " "; FS = "/|\n";} { -- lt_foo = ""; -- lt_count = 0; -+BEGIN {RS=" "; FS="/|\n";} { -+ lt_foo=""; -+ lt_count=0; - for (lt_i = NF; lt_i > 0; lt_i--) { - if ($lt_i != "" && $lt_i != ".") { - if ($lt_i == "..") { - lt_count++; - } else { - if (lt_count == 0) { -- lt_foo = "/" $lt_i lt_foo; -+ lt_foo="/" $lt_i lt_foo; - } else { - lt_count--; - } -@@ -11260,7 +10929,7 @@ - # for these hosts. - case $host_os in - mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ -- $SED 's|/\([A-Za-z]:\)|\1|g'` ;; -+ $SED 's,/\([A-Za-z]:\),\1,g'` ;; - esac - sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` - else -@@ -11269,7 +10938,7 @@ - library_names_spec= - libname_spec='lib$name' - soname_spec= --shrext_cmds=.so -+shrext_cmds=".so" - postinstall_cmds= - postuninstall_cmds= - finish_cmds= -@@ -11286,16 +10955,14 @@ - # flags to be left without arguments - need_version=unknown - -- -- - case $host_os in - aix3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname.a' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. -- soname_spec='$libname$release$shared_ext$major' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - - aix[4-9]*) -@@ -11303,91 +10970,41 @@ - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 -- library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with -- # the line '#! .'. This would cause the generated library to -- # depend on '.', always an invalid library. This was fixed in -+ # the line `#! .'. This would cause the generated library to -+ # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. 
- case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' -- echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then -+ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac -- # Using Import Files as archive members, it is possible to support -- # filename-based versioning of shared library archives on AIX. While -- # this would work for both with and without runtime linking, it will -- # prevent static linking of such archives. So we do filename-based -- # shared library versioning with .so extension only, which is used -- # when both runtime linking and shared linking is enabled. -- # Unfortunately, runtime linking may impact performance, so we do -- # not want this to be the default eventually. Also, we use the -- # versioned .so libs for executables only if there is the -brtl -- # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. -- # To allow for filename-based versioning support, we need to create -- # libNAME.so.V as an archive file, containing: -- # *) an Import File, referring to the versioned filename of the -- # archive as well as the shared archive member, telling the -- # bitwidth (32 or 64) of that shared object, and providing the -- # list of exported symbols of that shared object, eventually -- # decorated with the 'weak' keyword -- # *) the shared object with the F_LOADONLY flag set, to really avoid -- # it being seen by the linker. -- # At run time we better use the real file rather than another symlink, -- # but for link time we create the symlink libNAME.so -> libNAME.so.V -- -- case $with_aix_soname,$aix_use_runtimelinking in -- # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct -+ # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. -- aix,yes) # traditional libtool -- dynamic_linker='AIX unversionable lib.so' -+ if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib<name>.so - # instead of lib<name>.a to let people know that these are not - # typical AIX shared libraries. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- ;; -- aix,no) # traditional AIX only -- dynamic_linker='AIX lib.a(lib.so.V)' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- ;; -- svr4,*) # full svr4 only -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # We do not specify a path in Import Files, so LIBPATH fires. 
-- shlibpath_overrides_runpath=yes -- ;; -- *,yes) # both, prefer svr4 -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # unpreferred sharedlib libNAME.a needs extra handling -- postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' -- postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,no) # both, prefer aix -- dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling -- postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' -- postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' -- ;; -- esac -+ library_names_spec='${libname}${release}.a $libname.a' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ fi - shlibpath_var=LIBPATH - fi - ;; -@@ -11397,18 +11014,18 @@ - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
-- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' -+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - - beos*) -- library_names_spec='$libname$shared_ext' -+ library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; -@@ -11416,8 +11033,8 @@ - bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" -@@ -11429,7 +11046,7 @@ - - cygwin* | mingw* | pw32* | cegcc*) - version_type=windows -- shrext_cmds=.dll -+ shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - -@@ -11438,8 +11055,8 @@ - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ -@@ -11455,17 +11072,17 @@ - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' -- soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' -- library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' -@@ -11474,8 +11091,8 @@ - *,cl*) - # Native MSVC - libname_spec='$name' -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -- library_names_spec='$libname.dll.lib' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) -@@ -11502,7 +11119,7 @@ - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) -- sys_lib_search_path_spec=$LIB -+ sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -@@ -11515,8 +11132,8 @@ - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' -@@ -11529,7 +11146,7 @@ - - *) - # Assume MSVC wrapper -- library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' -+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac -@@ -11542,8 +11159,8 @@ - version_type=darwin - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$major$shared_ext' -+ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' -+ soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' -@@ -11556,8 +11173,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -11575,13 +11192,12 @@ - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac -@@ -11606,15 +11222,26 @@ - esac - ;; - -+gnu*) -+ version_type=linux # correct to gnu/linux during the next big refactor -+ need_lib_prefix=no -+ need_version=no -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ shlibpath_var=LD_LIBRARY_PATH -+ shlibpath_overrides_runpath=no -+ hardcode_into_libs=yes -+ ;; -+ - haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH -- shlibpath_overrides_runpath=no -+ shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; -@@ -11632,15 +11259,14 @@ - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -- if test 32 = "$HPUX_IA64_MODE"; then -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux32 - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux64 - fi -+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' -@@ -11648,8 +11274,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; -@@ -11658,8 +11284,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
-@@ -11672,8 +11298,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -11684,7 +11310,7 @@ - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix -@@ -11692,8 +11318,8 @@ - esac - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= -@@ -11712,8 +11338,8 @@ - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no -- sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" -- sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" -+ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" -+ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -@@ -11722,33 +11348,13 @@ - dynamic_linker=no - ;; - --linux*android*) -- version_type=none # Android doesn't support versioned libraries. -- need_lib_prefix=no -- need_version=no -- library_names_spec='$libname$release$shared_ext' -- soname_spec='$libname$release$shared_ext' -- finish_cmds= -- shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -- -- # This implies no fast_install, which is unacceptable. -- # Some rework will be needed to allow for fast_install -- # before this can be enabled. -- hardcode_into_libs=yes -- -- dynamic_linker='Android linker' -- # Don't embed -rpath directories since the linker doesn't support them. -- hardcode_libdir_flag_spec='-L$libdir' -- ;; -- - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -11792,15 +11398,14 @@ - # before this can be enabled. - hardcode_into_libs=yes - -- # Ideally, we could use ldconfig to report *all* directores which are -- # searched for libraries, however this is still not possible. 
Aside from not -- # being certain /sbin/ldconfig is available, command -- # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, -- # even though it is searched at run-time. Try to do the best guess by -- # appending ld.so.conf contents (and includes) to the search path. -+ # Add ABI-specific directories to the system library path. -+ sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" -+ -+ # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` -- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" -+ sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" -+ - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on -@@ -11817,12 +11422,12 @@ - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH -@@ -11832,7 +11437,7 @@ - - newsos6) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; -@@ -11841,68 +11446,58 @@ - version_type=qnx - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - --openbsd* | bitrig*) -+openbsd*) - version_type=sunos -- sys_lib_dlsearch_path_spec=/usr/lib -+ sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -- need_version=no -- else -- need_version=yes -- fi -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
-+ case $host_os in -+ openbsd3.3 | openbsd3.3.*) need_version=yes ;; -+ *) need_version=no ;; -+ esac -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ case $host_os in -+ openbsd2.[89] | openbsd2.[89].*) -+ shlibpath_overrides_runpath=no -+ ;; -+ *) -+ shlibpath_overrides_runpath=yes -+ ;; -+ esac -+ else -+ shlibpath_overrides_runpath=yes -+ fi - ;; - - os2*) - libname_spec='$name' -- version_type=windows -- shrext_cmds=.dll -- need_version=no -+ shrext_cmds=".dll" - need_lib_prefix=no -- # OS/2 can only load a DLL with a base name of 8 characters or less. -- soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; -- v=$($ECHO $release$versuffix | tr -d .-); -- n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); -- $ECHO $n$v`$shared_ext' -- library_names_spec='${libname}_dll.$libext' -+ library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' -- shlibpath_var=BEGINLIBPATH -- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ -- dldir=$destdir/`dirname \$dlpath`~ -- test -d \$dldir || mkdir -p \$dldir~ -- $install_prog $dir/$dlname \$dldir/$dlname~ -- chmod a+x \$dldir/$dlname~ -- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then -- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; -- fi' -- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ -- dlpath=$dir/\$dldll~ -- $RM \$dlpath' -+ shlibpath_var=LIBPATH - ;; - - osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -+ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - - rdos*) -@@ -11913,8 +11508,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -@@ -11924,11 +11519,11 @@ - - sunos4*) - version_type=sunos -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes -@@ -11936,8 +11531,8 @@ - - sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) -@@ -11958,24 +11553,24 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' -- soname_spec='$libname$shared_ext.$major' -+ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' -+ soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) -- version_type=sco -+ version_type=freebsd-elf - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -- if test yes = 
"$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' -@@ -11993,7 +11588,7 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes -@@ -12001,8 +11596,8 @@ - - uts4*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -12012,35 +11607,20 @@ - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 - $as_echo "$dynamic_linker" >&6; } --test no = "$dynamic_linker" && can_build_shared=no -+test "$dynamic_linker" = no && can_build_shared=no - - variables_saved_for_relink="PATH $shlibpath_var $runpath_var" --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" - fi - --if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then -- sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec -+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then -+ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" - fi -- --if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then -- sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec -+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then -+ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" - fi - --# remember unaugmented sys_lib_dlsearch_path content for libtool script decls... --configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec -- --# ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code --func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" -- --# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool --configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH -- -- -- -- -- -- - - - -@@ -12137,15 +11717,15 @@ - hardcode_action= - if test -n "$hardcode_libdir_flag_spec" || - test -n "$runpath_var" || -- test yes = "$hardcode_automatic"; then -+ test "X$hardcode_automatic" = "Xyes" ; then - - # We can hardcode non-existent directories. -- if test no != "$hardcode_direct" && -+ if test "$hardcode_direct" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one -- ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, )" && -- test no != "$hardcode_minus_L"; then -+ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && -+ test "$hardcode_minus_L" != no; then - # Linking always hardcodes the temporary library directory. 
- hardcode_action=relink - else -@@ -12160,12 +11740,12 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 - $as_echo "$hardcode_action" >&6; } - --if test relink = "$hardcode_action" || -- test yes = "$inherit_rpath"; then -+if test "$hardcode_action" = relink || -+ test "$inherit_rpath" = yes; then - # Fast installation is not supported - enable_fast_install=no --elif test yes = "$shlibpath_overrides_runpath" || -- test no = "$enable_shared"; then -+elif test "$shlibpath_overrides_runpath" = yes || -+ test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless - fi -@@ -12175,7 +11755,7 @@ - - - -- if test yes != "$enable_dlopen"; then -+ if test "x$enable_dlopen" != xyes; then - enable_dlopen=unknown - enable_dlopen_self=unknown - enable_dlopen_self_static=unknown -@@ -12185,23 +11765,23 @@ - - case $host_os in - beos*) -- lt_cv_dlopen=load_add_on -+ lt_cv_dlopen="load_add_on" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ;; - - mingw* | pw32* | cegcc*) -- lt_cv_dlopen=LoadLibrary -+ lt_cv_dlopen="LoadLibrary" - lt_cv_dlopen_libs= - ;; - - cygwin*) -- lt_cv_dlopen=dlopen -+ lt_cv_dlopen="dlopen" - lt_cv_dlopen_libs= - ;; - - darwin*) -- # if libdl is installed we need to link against it -+ # if libdl is installed we need to link against it - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 - $as_echo_n "checking for dlopen in -ldl... " >&6; } - if ${ac_cv_lib_dl_dlopen+:} false; then : -@@ -12239,10 +11819,10 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 - $as_echo "$ac_cv_lib_dl_dlopen" >&6; } - if test "x$ac_cv_lib_dl_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl -+ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" - else - -- lt_cv_dlopen=dyld -+ lt_cv_dlopen="dyld" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - -@@ -12250,18 +11830,10 @@ - - ;; - -- tpf*) -- # Don't try to run any link tests for TPF. We know it's impossible -- # because TPF is a cross-compiler, and we know how we open DSOs. -- lt_cv_dlopen=dlopen -- lt_cv_dlopen_libs= -- lt_cv_dlopen_self=no -- ;; -- - *) - ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" - if test "x$ac_cv_func_shl_load" = xyes; then : -- lt_cv_dlopen=shl_load -+ lt_cv_dlopen="shl_load" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 - $as_echo_n "checking for shl_load in -ldld... " >&6; } -@@ -12300,11 +11872,11 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 - $as_echo "$ac_cv_lib_dld_shl_load" >&6; } - if test "x$ac_cv_lib_dld_shl_load" = xyes; then : -- lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld -+ lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" - else - ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" - if test "x$ac_cv_func_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen -+ lt_cv_dlopen="dlopen" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 - $as_echo_n "checking for dlopen in -ldl... " >&6; } -@@ -12343,7 +11915,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 - $as_echo "$ac_cv_lib_dl_dlopen" >&6; } - if test "x$ac_cv_lib_dl_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl -+ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 - $as_echo_n "checking for dlopen in -lsvld... 
" >&6; } -@@ -12382,7 +11954,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 - $as_echo "$ac_cv_lib_svld_dlopen" >&6; } - if test "x$ac_cv_lib_svld_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld -+ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 - $as_echo_n "checking for dld_link in -ldld... " >&6; } -@@ -12421,7 +11993,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 - $as_echo "$ac_cv_lib_dld_dld_link" >&6; } - if test "x$ac_cv_lib_dld_dld_link" = xyes; then : -- lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld -+ lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" - fi - - -@@ -12442,21 +12014,21 @@ - ;; - esac - -- if test no = "$lt_cv_dlopen"; then -- enable_dlopen=no -- else -+ if test "x$lt_cv_dlopen" != xno; then - enable_dlopen=yes -+ else -+ enable_dlopen=no - fi - - case $lt_cv_dlopen in - dlopen) -- save_CPPFLAGS=$CPPFLAGS -- test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" -+ save_CPPFLAGS="$CPPFLAGS" -+ test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" - -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" - -- save_LIBS=$LIBS -+ save_LIBS="$LIBS" - LIBS="$lt_cv_dlopen_libs $LIBS" - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 -@@ -12464,7 +12036,7 @@ - if ${lt_cv_dlopen_self+:} false; then : - $as_echo_n "(cached) " >&6 - else -- if test yes = "$cross_compiling"; then : -+ if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self=cross - else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 -@@ -12511,9 +12083,9 @@ - # endif - #endif - --/* When -fvisibility=hidden is used, assume the code has been annotated -+/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ --#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) - int fnord () __attribute__((visibility("default"))); - #endif - -@@ -12543,7 +12115,7 @@ - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -- test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then -+ test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? - case x$lt_status in -@@ -12563,14 +12135,14 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 - $as_echo "$lt_cv_dlopen_self" >&6; } - -- if test yes = "$lt_cv_dlopen_self"; then -+ if test "x$lt_cv_dlopen_self" = xyes; then - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 - $as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } - if ${lt_cv_dlopen_self_static+:} false; then : - $as_echo_n "(cached) " >&6 - else -- if test yes = "$cross_compiling"; then : -+ if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self_static=cross - else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 -@@ -12617,9 +12189,9 @@ - # endif - #endif - --/* When -fvisibility=hidden is used, assume the code has been annotated -+/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ --#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) - int fnord () __attribute__((visibility("default"))); - #endif - -@@ -12649,7 +12221,7 @@ - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -- test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then -+ test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? - case x$lt_status in -@@ -12670,9 +12242,9 @@ - $as_echo "$lt_cv_dlopen_self_static" >&6; } - fi - -- CPPFLAGS=$save_CPPFLAGS -- LDFLAGS=$save_LDFLAGS -- LIBS=$save_LIBS -+ CPPFLAGS="$save_CPPFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+ LIBS="$save_LIBS" - ;; - esac - -@@ -12716,7 +12288,7 @@ - # FIXME - insert some real tests, host_os isn't really good enough - case $host_os in - darwin*) -- if test -n "$STRIP"; then -+ if test -n "$STRIP" ; then - striplib="$STRIP -x" - old_striplib="$STRIP -S" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -@@ -12744,7 +12316,7 @@ - - - -- # Report what library types will actually be built -+ # Report which library types will actually be built - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 - $as_echo_n "checking if libtool supports shared libraries... " >&6; } - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 -@@ -12752,13 +12324,13 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 - $as_echo_n "checking whether to build shared libraries... " >&6; } -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' -@@ -12766,12 +12338,8 @@ - ;; - - aix[4-9]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -12781,7 +12349,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 - $as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. 
-- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 - $as_echo "$enable_static" >&6; } - -@@ -12795,7 +12363,7 @@ - ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' - ac_compiler_gnu=$ac_cv_c_compiler_gnu - --CC=$lt_save_CC -+CC="$lt_save_CC" - - - -@@ -13266,6 +12834,12 @@ - fi - - -+# Check whether --with-ime was given. -+if test "${with_ime+set}" = set; then : -+ withval=$with_ime; -+fi -+ -+ - # Check whether --with-mpi was given. - if test "${with_mpi+set}" = set; then : - withval=$with_mpi; -@@ -13956,7 +13530,7 @@ - ac_link='$F77 -o conftest$ac_exeext $FFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' - ac_compiler_gnu=$ac_cv_f77_compiler_gnu - --if test -z "$F77" || test no = "$F77"; then -+if test -z "$F77" || test "X$F77" = "Xno"; then - _lt_disable_F77=yes - fi - -@@ -13993,7 +13567,7 @@ - # the F77 compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_disable_F77"; then -+if test "$_lt_disable_F77" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t -@@ -14039,15 +13613,22 @@ - - - # Allow CC to be a program name with arguments. -- lt_save_CC=$CC -+ lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${F77-"f77"} - CFLAGS=$FFLAGS - compiler=$CC - compiler_F77=$CC -- func_cc_basename $compiler --cc_basename=$func_cc_basename_result -+ for cc_temp in $compiler""; do -+ case $cc_temp in -+ compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -+ distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -+ \-*) ;; -+ *) break;; -+ esac -+done -+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - GCC=$G77 - if test -n "$compiler"; then -@@ -14058,25 +13639,21 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 - $as_echo_n "checking whether to build shared libraries... " >&6; } -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[4-9]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -14086,12 +13663,12 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 - $as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. 
-- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 - $as_echo "$enable_static" >&6; } - -- GCC_F77=$G77 -- LD_F77=$LD -+ GCC_F77="$G77" -+ LD_F77="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -14102,18 +13679,17 @@ - lt_prog_compiler_static_F77= - - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_prog_compiler_wl_F77='-Wl,' - lt_prog_compiler_static_F77='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_F77='-Bstatic' - fi -- lt_prog_compiler_pic_F77='-fPIC' - ;; - - amigaos*) -@@ -14124,8 +13700,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. - lt_prog_compiler_pic_F77='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -14141,11 +13717,6 @@ - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic_F77='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static_F77='$wl-static' -- ;; -- esac - ;; - - darwin* | rhapsody*) -@@ -14216,7 +13787,7 @@ - case $host_os in - aix*) - lt_prog_compiler_wl_F77='-Wl,' -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_F77='-Bstatic' - else -@@ -14224,29 +13795,10 @@ - fi - ;; - -- darwin* | rhapsody*) -- # PIC is the default on this platform -- # Common symbols not allowed in MH_DYLIB files -- lt_prog_compiler_pic_F77='-fno-common' -- case $cc_basename in -- nagfor*) -- # NAG Fortran compiler -- lt_prog_compiler_wl_F77='-Wl,-Wl,,' -- lt_prog_compiler_pic_F77='-PIC' -- lt_prog_compiler_static_F77='-Bstatic' -- ;; -- esac -- ;; -- - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic_F77='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static_F77='$wl-static' -- ;; -- esac - ;; - - hpux9* | hpux10* | hpux11*) -@@ -14262,7 +13814,7 @@ - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? -- lt_prog_compiler_static_F77='$wl-a ${wl}archive' -+ lt_prog_compiler_static_F77='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) -@@ -14271,9 +13823,9 @@ - lt_prog_compiler_static_F77='-non_shared' - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in -- # old Intel for x86_64, which still supported -KPIC. -+ # old Intel for x86_64 which still supported -KPIC. 
- ecc*) - lt_prog_compiler_wl_F77='-Wl,' - lt_prog_compiler_pic_F77='-KPIC' -@@ -14298,12 +13850,6 @@ - lt_prog_compiler_pic_F77='-PIC' - lt_prog_compiler_static_F77='-Bstatic' - ;; -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- lt_prog_compiler_wl_F77='-Wl,' -- lt_prog_compiler_pic_F77='-fPIC' -- lt_prog_compiler_static_F77='-static' -- ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -14401,7 +13947,7 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - lt_prog_compiler_pic_F77='-Kconform_pic' - lt_prog_compiler_static_F77='-Bstatic' - fi -@@ -14430,7 +13976,7 @@ - fi - - case $host_os in -- # For platforms that do not support PIC, -DPIC is meaningless: -+ # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic_F77= - ;; -@@ -14462,7 +14008,7 @@ - lt_cv_prog_compiler_pic_works_F77=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="$lt_prog_compiler_pic_F77" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="$lt_prog_compiler_pic_F77" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins -@@ -14492,7 +14038,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_F77" >&5 - $as_echo "$lt_cv_prog_compiler_pic_works_F77" >&6; } - --if test yes = "$lt_cv_prog_compiler_pic_works_F77"; then -+if test x"$lt_cv_prog_compiler_pic_works_F77" = xyes; then - case $lt_prog_compiler_pic_F77 in - "" | " "*) ;; - *) lt_prog_compiler_pic_F77=" $lt_prog_compiler_pic_F77" ;; -@@ -14518,7 +14064,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_prog_compiler_static_works_F77=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -14537,13 +14083,13 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_F77" >&5 - $as_echo "$lt_cv_prog_compiler_static_works_F77" >&6; } - --if test yes = "$lt_cv_prog_compiler_static_works_F77"; then -+if test x"$lt_cv_prog_compiler_static_works_F77" = xyes; then - : - else - lt_prog_compiler_static_F77= -@@ -14657,8 +14203,8 @@ - - - --hard_links=nottested --if test no = "$lt_cv_prog_compiler_c_o_F77" && test no != "$need_locks"; then -+hard_links="nottested" -+if test "$lt_cv_prog_compiler_c_o_F77" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 - $as_echo_n "checking if we can lock with hard links... 
" >&6; } -@@ -14670,9 +14216,9 @@ - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 - $as_echo "$hard_links" >&6; } -- if test no = "$hard_links"; then -- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 --$as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} -+ if test "$hard_links" = no; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -+$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi - else -@@ -14712,9 +14258,9 @@ - # included in the symbol list - include_expsyms_F77= - # exclude_expsyms can be an extended regexp of symbols to exclude -- # it will be wrapped by ' (' and ')$', so one must not match beginning or -- # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', -- # as well as any symbol that contains 'd'. -+ # it will be wrapped by ` (' and `)$', so one must not match beginning or -+ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', -+ # as well as any symbol that contains `d'. - exclude_expsyms_F77='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if -@@ -14729,7 +14275,7 @@ - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; -@@ -14737,7 +14283,7 @@ - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; -- openbsd* | bitrig*) -+ openbsd*) - with_gnu_ld=no - ;; - esac -@@ -14747,7 +14293,7 @@ - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility -@@ -14769,24 +14315,24 @@ - esac - fi - -- if test yes = "$lt_use_gnu_ld_interface"; then -+ if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty -- wlarc='$wl' -+ wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH -- hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir' -- export_dynamic_flag_spec_F77='$wl--export-dynamic' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' -+ export_dynamic_flag_spec_F77='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. 
- if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then -- whole_archive_flag_spec_F77=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ whole_archive_flag_spec_F77="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec_F77= - fi - supports_anon_versioning=no -- case `$LD -v | $SED -e 's/(^)\+)\s\+//' 2>&1` in -+ case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... -@@ -14799,7 +14345,7 @@ - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken -- if test ia64 != "$host_cpu"; then -+ if test "$host_cpu" != ia64; then - ld_shlibs_F77=no - cat <<_LT_EOF 1>&2 - -@@ -14818,7 +14364,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_F77='' - ;; - m68k) -@@ -14834,7 +14380,7 @@ - allow_undefined_flag_F77=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- archive_cmds_F77='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_F77='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs_F77=no - fi -@@ -14844,7 +14390,7 @@ - # _LT_TAGVAR(hardcode_libdir_flag_spec, F77) is actually meaningless, - # as there is no search path for DLLs. - hardcode_libdir_flag_spec_F77='-L$libdir' -- export_dynamic_flag_spec_F77='$wl--export-all-symbols' -+ export_dynamic_flag_spec_F77='${wl}--export-all-symbols' - allow_undefined_flag_F77=unsupported - always_export_symbols_F77=no - enable_shared_with_static_runtimes_F77=yes -@@ -14852,89 +14398,61 @@ - exclude_expsyms_F77='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... -- archive_expsym_cmds_F77='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... 
-+ archive_expsym_cmds_F77='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs_F77=no - fi - ;; - - haiku*) -- archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs_F77=yes - ;; - -- os2*) -- hardcode_libdir_flag_spec_F77='-L$libdir' -- hardcode_minus_L_F77=yes -- allow_undefined_flag_F77=unsupported -- shrext_cmds=.dll -- archive_cmds_F77='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds_F77='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- old_archive_From_new_cmds_F77='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- enable_shared_with_static_runtimes_F77=yes -- ;; -- - interix[3-9]*) - hardcode_direct_F77=no - hardcode_shlibpath_var_F77=no -- hardcode_libdir_flag_spec_F77='$wl-rpath,$libdir' -- export_dynamic_flag_spec_F77='$wl-E' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec_F77='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
-- archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- archive_expsym_cmds_F77='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_expsym_cmds_F77='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no -- if test linux-dietlibc = "$host_os"; then -+ if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ -- && test no = "$tmp_diet" -+ && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler -- whole_archive_flag_spec_F77='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_F77='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers -- whole_archive_flag_spec_F77='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_F77='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; -@@ -14945,47 +14463,42 @@ - lf95*) # Lahey Fortran 8.1 - whole_archive_flag_spec_F77= - tmp_sharedflag='--shared' ;; -- nagfor*) # NAGFOR 5.3 -- tmp_sharedflag='-Wl,-shared' ;; - xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 -- whole_archive_flag_spec_F77='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_F77='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_F77=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 -- whole_archive_flag_spec_F77='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do 
test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_F77='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_F77=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac -- archive_cmds_F77='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_F77='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_F77='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in -- tcc*) -- export_dynamic_flag_spec_F77='-rdynamic' -- ;; - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec_F77='--whole-archive$convenience --no-whole-archive' -- hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' - archive_cmds_F77='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_F77='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -14999,8 +14512,8 @@ - archive_cmds_F77='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -15018,8 +14531,8 @@ - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname 
-o $lib' -- archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_F77=no - fi -@@ -15031,7 +14544,7 @@ - ld_shlibs_F77=no - cat <<_LT_EOF 1>&2 - --*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot -+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not - *** reliably create shared libraries on SCO systems. Therefore, libtool - *** is disabling shared libraries support. We urge you to upgrade GNU - *** binutils to release 2.16.91.0.3 or newer. Another option is to modify -@@ -15046,9 +14559,9 @@ - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir' -- archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' -+ archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_F77=no - fi -@@ -15065,15 +14578,15 @@ - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_F77=no - fi - ;; - esac - -- if test no = "$ld_shlibs_F77"; then -+ if test "$ld_shlibs_F77" = no; then - runpath_var= - hardcode_libdir_flag_spec_F77= - export_dynamic_flag_spec_F77= -@@ -15089,7 +14602,7 @@ - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. - hardcode_minus_L_F77=yes -- if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then -+ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct_F77=unsupported -@@ -15097,57 +14610,34 @@ - ;; - - aix[4-9]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. 
-- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". -- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global -+ # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- export_symbols_cmds_F77='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds_F77='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- export_symbols_cmds_F77='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds_F77='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. - case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do -- if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then -+ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -15166,21 +14656,13 @@ - hardcode_direct_absolute_F77=yes - hardcode_libdir_separator_F77=':' - link_all_deplibs_F77=yes -- file_list_spec_F77='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # traditional, no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. 
-- hardcode_direct_F77=no -- hardcode_direct_absolute_F77=no -- ;; -- esac -+ file_list_spec_F77='${wl}-f,' - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -15199,42 +14681,35 @@ - ;; - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag="$shared_flag "'$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- export_dynamic_flag_spec_F77='$wl-bexpall' -+ export_dynamic_flag_spec_F77='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols_F77=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag_F77='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
-- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath__F77+:} false; then : -@@ -15263,7 +14738,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__F77"; then -- lt_cv_aix_libpath__F77=/usr/lib:/lib -+ lt_cv_aix_libpath__F77="/usr/lib:/lib" - fi - - fi -@@ -15271,17 +14746,17 @@ - aix_libpath=$lt_cv_aix_libpath__F77 - fi - -- hardcode_libdir_flag_spec_F77='$wl-blibpath:$libdir:'"$aix_libpath" -- archive_expsym_cmds_F77='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ hardcode_libdir_flag_spec_F77='${wl}-blibpath:$libdir:'"$aix_libpath" -+ archive_expsym_cmds_F77='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- hardcode_libdir_flag_spec_F77='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ hardcode_libdir_flag_spec_F77='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag_F77="-z nodefs" -- archive_expsym_cmds_F77="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ archive_expsym_cmds_F77="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. -- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath__F77+:} false; then : -@@ -15310,7 +14785,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__F77"; then -- lt_cv_aix_libpath__F77=/usr/lib:/lib -+ lt_cv_aix_libpath__F77="/usr/lib:/lib" - fi - - fi -@@ -15318,33 +14793,21 @@ - aix_libpath=$lt_cv_aix_libpath__F77 - fi - -- hardcode_libdir_flag_spec_F77='$wl-blibpath:$libdir:'"$aix_libpath" -+ hardcode_libdir_flag_spec_F77='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- no_undefined_flag_F77=' $wl-bernotok' -- allow_undefined_flag_F77=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ no_undefined_flag_F77=' ${wl}-bernotok' -+ allow_undefined_flag_F77=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. 
-- whole_archive_flag_spec_F77='$wl--whole-archive$convenience $wl--no-whole-archive' -+ whole_archive_flag_spec_F77='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec_F77='$convenience' - fi - archive_cmds_need_lc_F77=yes -- archive_expsym_cmds_F77='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared libraries. -- archive_expsym_cmds_F77="$archive_expsym_cmds_F77"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- archive_expsym_cmds_F77="$archive_expsym_cmds_F77"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- archive_expsym_cmds_F77="$archive_expsym_cmds_F77"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- archive_expsym_cmds_F77="$archive_expsym_cmds_F77"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared libraries. -+ archive_expsym_cmds_F77="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -15353,7 +14816,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_F77='' - ;; - m68k) -@@ -15383,17 +14846,16 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- archive_cmds_F77='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- archive_expsym_cmds_F77='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ archive_cmds_F77='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds_F77='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, F77)='true' - enable_shared_with_static_runtimes_F77=yes -@@ -15402,18 +14864,18 @@ - # Don't use ranlib - old_postinstall_cmds_F77='chmod 644 $oldlib' - postlink_cmds_F77='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # Assume MSVC wrapper -@@ -15422,7 +14884,7 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - archive_cmds_F77='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. 
-@@ -15441,24 +14903,24 @@ - hardcode_direct_F77=no - hardcode_automatic_F77=yes - hardcode_shlibpath_var_F77=unsupported -- if test yes = "$lt_cv_ld_force_load"; then -- whole_archive_flag_spec_F77='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' -+ if test "$lt_cv_ld_force_load" = "yes"; then -+ whole_archive_flag_spec_F77='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - compiler_needs_object_F77=yes - else - whole_archive_flag_spec_F77='' - fi - link_all_deplibs_F77=yes -- allow_undefined_flag_F77=$_lt_dar_allow_undefined -+ allow_undefined_flag_F77="$_lt_dar_allow_undefined" - case $cc_basename in -- ifort*|nagfor*) _lt_dar_can_shared=yes ;; -+ ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac -- if test yes = "$_lt_dar_can_shared"; then -+ if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all -- archive_cmds_F77="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" -- module_cmds_F77="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" -- archive_expsym_cmds_F77="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" -- module_expsym_cmds_F77="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" -+ archive_cmds_F77="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" -+ module_cmds_F77="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" -+ archive_expsym_cmds_F77="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" -+ module_expsym_cmds_F77="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs_F77=no -@@ -15500,33 +14962,33 @@ - ;; - - hpux9*) -- if test yes = "$GCC"; then -- archive_cmds_F77='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GCC" = yes; then -+ archive_cmds_F77='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else -- archive_cmds_F77='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ archive_cmds_F77='$RM 
$output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -- hardcode_libdir_flag_spec_F77='$wl+b $wl$libdir' -+ hardcode_libdir_flag_spec_F77='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_F77=: - hardcode_direct_F77=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L_F77=yes -- export_dynamic_flag_spec_F77='$wl-E' -+ export_dynamic_flag_spec_F77='${wl}-E' - ;; - - hpux10*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -- archive_cmds_F77='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then -+ archive_cmds_F77='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_F77='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec_F77='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec_F77='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_F77=: - hardcode_direct_F77=yes - hardcode_direct_absolute_F77=yes -- export_dynamic_flag_spec_F77='$wl-E' -+ export_dynamic_flag_spec_F77='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L_F77=yes -@@ -15534,33 +14996,33 @@ - ;; - - hpux11*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) -- archive_cmds_F77='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds_F77='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds_F77='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) -- archive_cmds_F77='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds_F77='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds_F77='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec_F77='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec_F77='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_F77=: - - case $host_cpu in -@@ -15571,7 +15033,7 @@ - *) - hardcode_direct_F77=yes - hardcode_direct_absolute_F77=yes -- export_dynamic_flag_spec_F77='$wl-E' -+ 
export_dynamic_flag_spec_F77='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. -@@ -15582,8 +15044,8 @@ - ;; - - irix5* | irix6* | nonstopux*) -- if test yes = "$GCC"; then -- archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -@@ -15593,8 +15055,8 @@ - if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 - else -- save_LDFLAGS=$LDFLAGS -- LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat > conftest.$ac_ext <<_ACEOF - - subroutine foo -@@ -15607,34 +15069,24 @@ - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 - $as_echo "$lt_cv_irix_exported_symbol" >&6; } -- if test yes = "$lt_cv_irix_exported_symbol"; then -- archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else -- archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- archive_expsym_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' -+ archive_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ archive_expsym_cmds_F77='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc_F77='no' -- hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_F77=: - inherit_rpath_F77=yes - link_all_deplibs_F77=yes - ;; - -- linux*) -- case $cc_basename in -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- ld_shlibs_F77=yes -- 
archive_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- ;; -- esac -- ;; -- - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds_F77='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out -@@ -15649,7 +15101,7 @@ - newsos6) - archive_cmds_F77='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_F77=yes -- hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_F77=: - hardcode_shlibpath_var_F77=no - ;; -@@ -15657,19 +15109,27 @@ - *nto* | *qnx*) - ;; - -- openbsd* | bitrig*) -+ openbsd*) - if test -f /usr/libexec/ld.so; then - hardcode_direct_F77=yes - hardcode_shlibpath_var_F77=no - hardcode_direct_absolute_F77=yes -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' -- hardcode_libdir_flag_spec_F77='$wl-rpath,$libdir' -- export_dynamic_flag_spec_F77='$wl-E' -+ archive_expsym_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec_F77='${wl}-E' - else -- archive_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- hardcode_libdir_flag_spec_F77='$wl-rpath,$libdir' -+ case $host_os in -+ openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) -+ archive_cmds_F77='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' -+ hardcode_libdir_flag_spec_F77='-R$libdir' -+ ;; -+ *) -+ archive_cmds_F77='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath,$libdir' -+ ;; -+ esac - fi - else - ld_shlibs_F77=no -@@ -15680,53 +15140,33 @@ - hardcode_libdir_flag_spec_F77='-L$libdir' - hardcode_minus_L_F77=yes - allow_undefined_flag_F77=unsupported -- shrext_cmds=.dll -- archive_cmds_F77='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds_F77='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- old_archive_From_new_cmds_F77='emximp -o $output_objdir/${libname}_dll.a 
$output_objdir/$libname.def' -- enable_shared_with_static_runtimes_F77=yes -+ archive_cmds_F77='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' -+ old_archive_from_new_cmds_F77='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) -- if test yes = "$GCC"; then -- allow_undefined_flag_F77=' $wl-expect_unresolved $wl\*' -- archive_cmds_F77='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ allow_undefined_flag_F77=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds_F77='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag_F77=' -expect_unresolved \*' -- archive_cmds_F77='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds_F77='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc_F77='no' -- hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_F77=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag -- if test yes = "$GCC"; then -- allow_undefined_flag_F77=' $wl-expect_unresolved $wl\*' -- archive_cmds_F77='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -- hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir' -+ if test "$GCC" = yes; then -+ allow_undefined_flag_F77=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds_F77='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ hardcode_libdir_flag_spec_F77='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag_F77=' -expect_unresolved \*' -- archive_cmds_F77='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds_F77='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds_F77='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; 
printf "%s\\n" "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' -+ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec_F77='-rpath $libdir' -@@ -15737,24 +15177,24 @@ - - solaris*) - no_undefined_flag_F77=' -z defs' -- if test yes = "$GCC"; then -- wlarc='$wl' -- archive_cmds_F77='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ wlarc='${wl}' -+ archive_cmds_F77='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_F77='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' -- archive_cmds_F77='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' -+ archive_cmds_F77='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds_F77='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' -+ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) -- wlarc='$wl' -- archive_cmds_F77='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ wlarc='${wl}' -+ archive_cmds_F77='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_F77='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi -@@ -15764,11 +15204,11 @@ - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. GCC discards it without '$wl', -+ # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) 
-- if test yes = "$GCC"; then -- whole_archive_flag_spec_F77='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ if test "$GCC" = yes; then -+ whole_archive_flag_spec_F77='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec_F77='-z allextract$convenience -z defaultextract' - fi -@@ -15778,10 +15218,10 @@ - ;; - - sunos4*) -- if test sequent = "$host_vendor"; then -+ if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. -- archive_cmds_F77='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_F77='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -15830,43 +15270,43 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) -- no_undefined_flag_F77='$wl-z,text' -+ no_undefined_flag_F77='${wl}-z,text' - archive_cmds_need_lc_F77=no - hardcode_shlibpath_var_F77=no - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds_F77='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_F77='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds_F77='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_F77='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds_F77='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_F77='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_F77='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- no_undefined_flag_F77='$wl-z,text' -- allow_undefined_flag_F77='$wl-z,nodefs' -+ no_undefined_flag_F77='${wl}-z,text' -+ allow_undefined_flag_F77='${wl}-z,nodefs' - archive_cmds_need_lc_F77=no - hardcode_shlibpath_var_F77=no -- hardcode_libdir_flag_spec_F77='$wl-R,$libdir' -+ hardcode_libdir_flag_spec_F77='${wl}-R,$libdir' - hardcode_libdir_separator_F77=':' - link_all_deplibs_F77=yes -- export_dynamic_flag_spec_F77='$wl-Bexport' -+ export_dynamic_flag_spec_F77='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds_F77='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_F77='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds_F77='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_F77='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds_F77='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_F77='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_F77='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_F77='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - -@@ -15881,10 +15321,10 @@ - ;; - esac - -- if test sni = "$host_vendor"; then -+ if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) -- export_dynamic_flag_spec_F77='$wl-Blargedynsym' -+ export_dynamic_flag_spec_F77='${wl}-Blargedynsym' - ;; - esac - fi -@@ -15892,7 +15332,7 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_F77" >&5 - $as_echo "$ld_shlibs_F77" >&6; } --test no = "$ld_shlibs_F77" && can_build_shared=no -+test "$ld_shlibs_F77" = no && can_build_shared=no - - with_gnu_ld_F77=$with_gnu_ld - -@@ -15909,7 +15349,7 @@ - # Assume -lc should be added - archive_cmds_need_lc_F77=yes - -- if test yes,yes = "$GCC,$enable_shared"; then -+ if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds_F77 in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. -@@ -16037,7 +15477,7 @@ - library_names_spec= - libname_spec='lib$name' - soname_spec= --shrext_cmds=.so -+shrext_cmds=".so" - postinstall_cmds= - postuninstall_cmds= - finish_cmds= -@@ -16054,16 +15494,14 @@ - # flags to be left without arguments - need_version=unknown - -- -- - case $host_os in - aix3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname.a' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. 
-- soname_spec='$libname$release$shared_ext$major' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - - aix[4-9]*) -@@ -16071,91 +15509,41 @@ - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 -- library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with -- # the line '#! .'. This would cause the generated library to -- # depend on '.', always an invalid library. This was fixed in -+ # the line `#! .'. This would cause the generated library to -+ # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. - case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' -- echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then -+ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac -- # Using Import Files as archive members, it is possible to support -- # filename-based versioning of shared library archives on AIX. While -- # this would work for both with and without runtime linking, it will -- # prevent static linking of such archives. So we do filename-based -- # shared library versioning with .so extension only, which is used -- # when both runtime linking and shared linking is enabled. -- # Unfortunately, runtime linking may impact performance, so we do -- # not want this to be the default eventually. Also, we use the -- # versioned .so libs for executables only if there is the -brtl -- # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. -- # To allow for filename-based versioning support, we need to create -- # libNAME.so.V as an archive file, containing: -- # *) an Import File, referring to the versioned filename of the -- # archive as well as the shared archive member, telling the -- # bitwidth (32 or 64) of that shared object, and providing the -- # list of exported symbols of that shared object, eventually -- # decorated with the 'weak' keyword -- # *) the shared object with the F_LOADONLY flag set, to really avoid -- # it being seen by the linker. -- # At run time we better use the real file rather than another symlink, -- # but for link time we create the symlink libNAME.so -> libNAME.so.V -- -- case $with_aix_soname,$aix_use_runtimelinking in -- # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct -+ # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. -- aix,yes) # traditional libtool -- dynamic_linker='AIX unversionable lib.so' -+ if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib<name>.so - # instead of lib<name>.a to let people know that these are not - # typical AIX shared libraries. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- ;; -- aix,no) # traditional AIX only -- dynamic_linker='AIX lib.a(lib.so.V)' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- ;; -- svr4,*) # full svr4 only -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,yes) # both, prefer svr4 -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # unpreferred sharedlib libNAME.a needs extra handling -- postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' -- postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,no) # both, prefer aix -- dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling -- postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' -- postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' -- ;; -- esac -+ library_names_spec='${libname}${release}.a $libname.a' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ fi - shlibpath_var=LIBPATH - fi - ;; -@@ -16165,18 +15553,18 @@ - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
-- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' -+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - - beos*) -- library_names_spec='$libname$shared_ext' -+ library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; -@@ -16184,8 +15572,8 @@ - bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" -@@ -16197,7 +15585,7 @@ - - cygwin* | mingw* | pw32* | cegcc*) - version_type=windows -- shrext_cmds=.dll -+ shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - -@@ -16206,8 +15594,8 @@ - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ -@@ -16223,16 +15611,16 @@ - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' -- soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' -- library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' -@@ -16241,8 +15629,8 @@ - *,cl*) - # Native MSVC - libname_spec='$name' -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -- library_names_spec='$libname.dll.lib' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) -@@ -16269,7 +15657,7 @@ - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) -- sys_lib_search_path_spec=$LIB -+ sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -@@ -16282,8 +15670,8 @@ - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' -@@ -16296,7 +15684,7 @@ - - *) - # Assume MSVC wrapper -- library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' -+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac -@@ -16309,8 +15697,8 @@ - version_type=darwin - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$major$shared_ext' -+ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' -+ soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' -@@ -16322,8 +15710,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -16341,13 +15729,12 @@ - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac -@@ -16372,15 +15759,26 @@ - esac - ;; - -+gnu*) -+ version_type=linux # correct to gnu/linux during the next big refactor -+ need_lib_prefix=no -+ need_version=no -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ shlibpath_var=LD_LIBRARY_PATH -+ shlibpath_overrides_runpath=no -+ hardcode_into_libs=yes -+ ;; -+ - haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH -- shlibpath_overrides_runpath=no -+ shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; -@@ -16398,15 +15796,14 @@ - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -- if test 32 = "$HPUX_IA64_MODE"; then -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux32 - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux64 - fi -+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' -@@ -16414,8 +15811,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; -@@ -16424,8 +15821,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
-@@ -16438,8 +15835,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -16450,7 +15847,7 @@ - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix -@@ -16458,8 +15855,8 @@ - esac - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= -@@ -16478,8 +15875,8 @@ - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no -- sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" -- sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" -+ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" -+ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -@@ -16488,33 +15885,13 @@ - dynamic_linker=no - ;; - --linux*android*) -- version_type=none # Android doesn't support versioned libraries. -- need_lib_prefix=no -- need_version=no -- library_names_spec='$libname$release$shared_ext' -- soname_spec='$libname$release$shared_ext' -- finish_cmds= -- shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -- -- # This implies no fast_install, which is unacceptable. -- # Some rework will be needed to allow for fast_install -- # before this can be enabled. -- hardcode_into_libs=yes -- -- dynamic_linker='Android linker' -- # Don't embed -rpath directories since the linker doesn't support them. -- hardcode_libdir_flag_spec_F77='-L$libdir' -- ;; -- - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -16552,15 +15929,14 @@ - # before this can be enabled. - hardcode_into_libs=yes - -- # Ideally, we could use ldconfig to report *all* directores which are -- # searched for libraries, however this is still not possible. 
Aside from not -- # being certain /sbin/ldconfig is available, command -- # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, -- # even though it is searched at run-time. Try to do the best guess by -- # appending ld.so.conf contents (and includes) to the search path. -+ # Add ABI-specific directories to the system library path. -+ sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" -+ -+ # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` -- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" -+ sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" -+ - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on -@@ -16577,12 +15953,12 @@ - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH -@@ -16592,7 +15968,7 @@ - - newsos6) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; -@@ -16601,68 +15977,58 @@ - version_type=qnx - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - --openbsd* | bitrig*) -+openbsd*) - version_type=sunos -- sys_lib_dlsearch_path_spec=/usr/lib -+ sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -- need_version=no -- else -- need_version=yes -- fi -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
-+ case $host_os in -+ openbsd3.3 | openbsd3.3.*) need_version=yes ;; -+ *) need_version=no ;; -+ esac -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ case $host_os in -+ openbsd2.[89] | openbsd2.[89].*) -+ shlibpath_overrides_runpath=no -+ ;; -+ *) -+ shlibpath_overrides_runpath=yes -+ ;; -+ esac -+ else -+ shlibpath_overrides_runpath=yes -+ fi - ;; - - os2*) - libname_spec='$name' -- version_type=windows -- shrext_cmds=.dll -- need_version=no -+ shrext_cmds=".dll" - need_lib_prefix=no -- # OS/2 can only load a DLL with a base name of 8 characters or less. -- soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; -- v=$($ECHO $release$versuffix | tr -d .-); -- n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); -- $ECHO $n$v`$shared_ext' -- library_names_spec='${libname}_dll.$libext' -+ library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' -- shlibpath_var=BEGINLIBPATH -- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ -- dldir=$destdir/`dirname \$dlpath`~ -- test -d \$dldir || mkdir -p \$dldir~ -- $install_prog $dir/$dlname \$dldir/$dlname~ -- chmod a+x \$dldir/$dlname~ -- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then -- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; -- fi' -- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ -- dlpath=$dir/\$dldll~ -- $RM \$dlpath' -+ shlibpath_var=LIBPATH - ;; - - osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -+ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - - rdos*) -@@ -16673,8 +16039,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -@@ -16684,11 +16050,11 @@ - - sunos4*) - version_type=sunos -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes -@@ -16696,8 +16062,8 @@ - - sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) -@@ -16718,24 +16084,24 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' -- soname_spec='$libname$shared_ext.$major' -+ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' -+ soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) -- version_type=sco -+ version_type=freebsd-elf - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -- if test yes = 
"$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' -@@ -16753,7 +16119,7 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes -@@ -16761,8 +16127,8 @@ - - uts4*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -16772,32 +16138,20 @@ - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 - $as_echo "$dynamic_linker" >&6; } --test no = "$dynamic_linker" && can_build_shared=no -+test "$dynamic_linker" = no && can_build_shared=no - - variables_saved_for_relink="PATH $shlibpath_var $runpath_var" --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" - fi - --if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then -- sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec -+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then -+ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" - fi -- --if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then -- sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec -+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then -+ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" - fi - --# remember unaugmented sys_lib_dlsearch_path content for libtool script decls... --configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec -- --# ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code --func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" -- --# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool --configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH -- -- -- - - - -@@ -16840,15 +16194,15 @@ - hardcode_action_F77= - if test -n "$hardcode_libdir_flag_spec_F77" || - test -n "$runpath_var_F77" || -- test yes = "$hardcode_automatic_F77"; then -+ test "X$hardcode_automatic_F77" = "Xyes" ; then - - # We can hardcode non-existent directories. -- if test no != "$hardcode_direct_F77" && -+ if test "$hardcode_direct_F77" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one -- ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, F77)" && -- test no != "$hardcode_minus_L_F77"; then -+ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, F77)" != no && -+ test "$hardcode_minus_L_F77" != no; then - # Linking always hardcodes the temporary library directory. 
- hardcode_action_F77=relink - else -@@ -16863,12 +16217,12 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_F77" >&5 - $as_echo "$hardcode_action_F77" >&6; } - --if test relink = "$hardcode_action_F77" || -- test yes = "$inherit_rpath_F77"; then -+if test "$hardcode_action_F77" = relink || -+ test "$inherit_rpath_F77" = yes; then - # Fast installation is not supported - enable_fast_install=no --elif test yes = "$shlibpath_overrides_runpath" || -- test no = "$enable_shared"; then -+elif test "$shlibpath_overrides_runpath" = yes || -+ test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless - fi -@@ -16882,9 +16236,9 @@ - fi # test -n "$compiler" - - GCC=$lt_save_GCC -- CC=$lt_save_CC -- CFLAGS=$lt_save_CFLAGS --fi # test yes != "$_lt_disable_F77" -+ CC="$lt_save_CC" -+ CFLAGS="$lt_save_CFLAGS" -+fi # test "$_lt_disable_F77" != yes - - ac_ext=c - ac_cpp='$CPP $CPPFLAGS' -@@ -17111,9 +16465,9 @@ - - func_stripname_cnf () - { -- case $2 in -- .*) func_stripname_result=`$ECHO "$3" | $SED "s%^$1%%; s%\\\\$2\$%%"`;; -- *) func_stripname_result=`$ECHO "$3" | $SED "s%^$1%%; s%$2\$%%"`;; -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; - esac - } # func_stripname_cnf - -@@ -17124,7 +16478,7 @@ - ac_compiler_gnu=$ac_cv_fc_compiler_gnu - - --if test -z "$FC" || test no = "$FC"; then -+if test -z "$FC" || test "X$FC" = "Xno"; then - _lt_disable_FC=yes - fi - -@@ -17161,7 +16515,7 @@ - # the FC compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_disable_FC"; then -+if test "$_lt_disable_FC" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t -@@ -17207,7 +16561,7 @@ - - - # Allow CC to be a program name with arguments. -- lt_save_CC=$CC -+ lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${FC-"f95"} -@@ -17216,8 +16570,15 @@ - GCC=$ac_cv_fc_compiler_gnu - - compiler_FC=$CC -- func_cc_basename $compiler --cc_basename=$func_cc_basename_result -+ for cc_temp in $compiler""; do -+ case $cc_temp in -+ compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -+ distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -+ \-*) ;; -+ *) break;; -+ esac -+done -+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - - if test -n "$compiler"; then -@@ -17228,25 +16589,21 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 - $as_echo_n "checking whether to build shared libraries... " >&6; } -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. 
- case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[4-9]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -17256,12 +16613,12 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 - $as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. -- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 - $as_echo "$enable_static" >&6; } - -- GCC_FC=$ac_cv_fc_compiler_gnu -- LD_FC=$LD -+ GCC_FC="$ac_cv_fc_compiler_gnu" -+ LD_FC="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -17304,13 +16661,13 @@ - pre_test_object_deps_done=no - - for p in `eval "$output_verbose_link_cmd"`; do -- case $prev$p in -+ case ${prev}${p} in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. - # Remove the space. -- if test x-L = "$p" || -- test x-R = "$p"; then -+ if test $p = "-L" || -+ test $p = "-R"; then - prev=$p - continue - fi -@@ -17326,16 +16683,16 @@ - case $p in - =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; - esac -- if test no = "$pre_test_object_deps_done"; then -- case $prev in -+ if test "$pre_test_object_deps_done" = no; then -+ case ${prev} in - -L | -R) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. - if test -z "$compiler_lib_search_path_FC"; then -- compiler_lib_search_path_FC=$prev$p -+ compiler_lib_search_path_FC="${prev}${p}" - else -- compiler_lib_search_path_FC="${compiler_lib_search_path_FC} $prev$p" -+ compiler_lib_search_path_FC="${compiler_lib_search_path_FC} ${prev}${p}" - fi - ;; - # The "-l" case would never come before the object being -@@ -17343,9 +16700,9 @@ - esac - else - if test -z "$postdeps_FC"; then -- postdeps_FC=$prev$p -+ postdeps_FC="${prev}${p}" - else -- postdeps_FC="${postdeps_FC} $prev$p" -+ postdeps_FC="${postdeps_FC} ${prev}${p}" - fi - fi - prev= -@@ -17360,15 +16717,15 @@ - continue - fi - -- if test no = "$pre_test_object_deps_done"; then -+ if test "$pre_test_object_deps_done" = no; then - if test -z "$predep_objects_FC"; then -- predep_objects_FC=$p -+ predep_objects_FC="$p" - else - predep_objects_FC="$predep_objects_FC $p" - fi - else - if test -z "$postdep_objects_FC"; then -- postdep_objects_FC=$p -+ postdep_objects_FC="$p" - else - postdep_objects_FC="$postdep_objects_FC $p" - fi -@@ -17397,7 +16754,7 @@ - esac - compiler_lib_search_dirs_FC= - if test -n "${compiler_lib_search_path_FC}"; then -- compiler_lib_search_dirs_FC=`echo " ${compiler_lib_search_path_FC}" | $SED -e 's! -L! !g' -e 's!^ !!'` -+ compiler_lib_search_dirs_FC=`echo " ${compiler_lib_search_path_FC}" | ${SED} -e 's! -L! 
!g' -e 's!^ !!'` - fi - - -@@ -17435,18 +16792,17 @@ - lt_prog_compiler_static_FC= - - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_static_FC='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_FC='-Bstatic' - fi -- lt_prog_compiler_pic_FC='-fPIC' - ;; - - amigaos*) -@@ -17457,8 +16813,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. - lt_prog_compiler_pic_FC='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -17474,11 +16830,6 @@ - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic_FC='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static_FC='$wl-static' -- ;; -- esac - ;; - - darwin* | rhapsody*) -@@ -17549,7 +16900,7 @@ - case $host_os in - aix*) - lt_prog_compiler_wl_FC='-Wl,' -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_FC='-Bstatic' - else -@@ -17557,29 +16908,10 @@ - fi - ;; - -- darwin* | rhapsody*) -- # PIC is the default on this platform -- # Common symbols not allowed in MH_DYLIB files -- lt_prog_compiler_pic_FC='-fno-common' -- case $cc_basename in -- nagfor*) -- # NAG Fortran compiler -- lt_prog_compiler_wl_FC='-Wl,-Wl,,' -- lt_prog_compiler_pic_FC='-PIC' -- lt_prog_compiler_static_FC='-Bstatic' -- ;; -- esac -- ;; -- - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic_FC='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static_FC='$wl-static' -- ;; -- esac - ;; - - hpux9* | hpux10* | hpux11*) -@@ -17595,7 +16927,7 @@ - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? -- lt_prog_compiler_static_FC='$wl-a ${wl}archive' -+ lt_prog_compiler_static_FC='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) -@@ -17604,9 +16936,9 @@ - lt_prog_compiler_static_FC='-non_shared' - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in -- # old Intel for x86_64, which still supported -KPIC. -+ # old Intel for x86_64 which still supported -KPIC. 
- ecc*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-KPIC' -@@ -17631,12 +16963,6 @@ - lt_prog_compiler_pic_FC='-PIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- lt_prog_compiler_wl_FC='-Wl,' -- lt_prog_compiler_pic_FC='-fPIC' -- lt_prog_compiler_static_FC='-static' -- ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -17734,7 +17060,7 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - lt_prog_compiler_pic_FC='-Kconform_pic' - lt_prog_compiler_static_FC='-Bstatic' - fi -@@ -17763,7 +17089,7 @@ - fi - - case $host_os in -- # For platforms that do not support PIC, -DPIC is meaningless: -+ # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic_FC= - ;; -@@ -17795,7 +17121,7 @@ - lt_cv_prog_compiler_pic_works_FC=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="$lt_prog_compiler_pic_FC" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="$lt_prog_compiler_pic_FC" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins -@@ -17825,7 +17151,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_FC" >&5 - $as_echo "$lt_cv_prog_compiler_pic_works_FC" >&6; } - --if test yes = "$lt_cv_prog_compiler_pic_works_FC"; then -+if test x"$lt_cv_prog_compiler_pic_works_FC" = xyes; then - case $lt_prog_compiler_pic_FC in - "" | " "*) ;; - *) lt_prog_compiler_pic_FC=" $lt_prog_compiler_pic_FC" ;; -@@ -17851,7 +17177,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_prog_compiler_static_works_FC=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -17870,13 +17196,13 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_FC" >&5 - $as_echo "$lt_cv_prog_compiler_static_works_FC" >&6; } - --if test yes = "$lt_cv_prog_compiler_static_works_FC"; then -+if test x"$lt_cv_prog_compiler_static_works_FC" = xyes; then - : - else - lt_prog_compiler_static_FC= -@@ -17990,8 +17316,8 @@ - - - --hard_links=nottested --if test no = "$lt_cv_prog_compiler_c_o_FC" && test no != "$need_locks"; then -+hard_links="nottested" -+if test "$lt_cv_prog_compiler_c_o_FC" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 - $as_echo_n "checking if we can lock with hard links... 
" >&6; } -@@ -18003,9 +17329,9 @@ - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 - $as_echo "$hard_links" >&6; } -- if test no = "$hard_links"; then -- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 --$as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} -+ if test "$hard_links" = no; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -+$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi - else -@@ -18045,9 +17371,9 @@ - # included in the symbol list - include_expsyms_FC= - # exclude_expsyms can be an extended regexp of symbols to exclude -- # it will be wrapped by ' (' and ')$', so one must not match beginning or -- # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', -- # as well as any symbol that contains 'd'. -+ # it will be wrapped by ` (' and `)$', so one must not match beginning or -+ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', -+ # as well as any symbol that contains `d'. - exclude_expsyms_FC='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if -@@ -18062,7 +17388,7 @@ - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; -@@ -18070,7 +17396,7 @@ - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; -- openbsd* | bitrig*) -+ openbsd*) - with_gnu_ld=no - ;; - esac -@@ -18080,7 +17406,7 @@ - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility -@@ -18102,24 +17428,24 @@ - esac - fi - -- if test yes = "$lt_use_gnu_ld_interface"; then -+ if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty -- wlarc='$wl' -+ wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH -- hardcode_libdir_flag_spec_FC='$wl-rpath $wl$libdir' -- export_dynamic_flag_spec_FC='$wl--export-dynamic' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' -+ export_dynamic_flag_spec_FC='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. 
- if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then -- whole_archive_flag_spec_FC=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ whole_archive_flag_spec_FC="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec_FC= - fi - supports_anon_versioning=no -- case `$LD -v | $SED -e 's/(^)\+)\s\+//' 2>&1` in -+ case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... -@@ -18132,7 +17458,7 @@ - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken -- if test ia64 != "$host_cpu"; then -+ if test "$host_cpu" != ia64; then - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - -@@ -18151,7 +17477,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='' - ;; - m68k) -@@ -18167,7 +17493,7 @@ - allow_undefined_flag_FC=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- archive_cmds_FC='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_FC='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs_FC=no - fi -@@ -18177,7 +17503,7 @@ - # _LT_TAGVAR(hardcode_libdir_flag_spec, FC) is actually meaningless, - # as there is no search path for DLLs. - hardcode_libdir_flag_spec_FC='-L$libdir' -- export_dynamic_flag_spec_FC='$wl--export-all-symbols' -+ export_dynamic_flag_spec_FC='${wl}--export-all-symbols' - allow_undefined_flag_FC=unsupported - always_export_symbols_FC=no - enable_shared_with_static_runtimes_FC=yes -@@ -18185,89 +17511,61 @@ - exclude_expsyms_FC='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... -- archive_expsym_cmds_FC='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... 
-+ archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs_FC=no - fi - ;; - - haiku*) -- archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs_FC=yes - ;; - -- os2*) -- hardcode_libdir_flag_spec_FC='-L$libdir' -- hardcode_minus_L_FC=yes -- allow_undefined_flag_FC=unsupported -- shrext_cmds=.dll -- archive_cmds_FC='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds_FC='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- old_archive_From_new_cmds_FC='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- enable_shared_with_static_runtimes_FC=yes -- ;; -- - interix[3-9]*) - hardcode_direct_FC=no - hardcode_shlibpath_var_FC=no -- hardcode_libdir_flag_spec_FC='$wl-rpath,$libdir' -- export_dynamic_flag_spec_FC='$wl-E' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec_FC='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
-- archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- archive_expsym_cmds_FC='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_expsym_cmds_FC='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no -- if test linux-dietlibc = "$host_os"; then -+ if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ -- && test no = "$tmp_diet" -+ && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler -- whole_archive_flag_spec_FC='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers -- whole_archive_flag_spec_FC='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; -@@ -18278,47 +17576,42 @@ - lf95*) # Lahey Fortran 8.1 - whole_archive_flag_spec_FC= - tmp_sharedflag='--shared' ;; -- nagfor*) # NAGFOR 5.3 -- tmp_sharedflag='-Wl,-shared' ;; - xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 -- whole_archive_flag_spec_FC='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_FC=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 -- whole_archive_flag_spec_FC='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z 
\"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec_FC='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_FC=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac -- archive_cmds_FC='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_FC='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in -- tcc*) -- export_dynamic_flag_spec_FC='-rdynamic' -- ;; - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec_FC='--whole-archive$convenience --no-whole-archive' -- hardcode_libdir_flag_spec_FC='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - archive_cmds_FC='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -18332,8 +17625,8 @@ - archive_cmds_FC='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -18351,8 +17644,8 @@ - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- 
archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi -@@ -18364,7 +17657,7 @@ - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - --*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot -+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not - *** reliably create shared libraries on SCO systems. Therefore, libtool - *** is disabling shared libraries support. We urge you to upgrade GNU - *** binutils to release 2.16.91.0.3 or newer. Another option is to modify -@@ -18379,9 +17672,9 @@ - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- hardcode_libdir_flag_spec_FC='$wl-rpath $wl$libdir' -- archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' -+ archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi -@@ -18398,15 +17691,15 @@ - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi - ;; - esac - -- if test no = "$ld_shlibs_FC"; then -+ if test "$ld_shlibs_FC" = no; then - runpath_var= - hardcode_libdir_flag_spec_FC= - export_dynamic_flag_spec_FC= -@@ -18422,7 +17715,7 @@ - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. - hardcode_minus_L_FC=yes -- if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then -+ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct_FC=unsupported -@@ -18430,57 +17723,34 @@ - ;; - - aix[4-9]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. 
-- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". -- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global -+ # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- export_symbols_cmds_FC='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds_FC='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- export_symbols_cmds_FC='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds_FC='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. - case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do -- if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then -+ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -18499,21 +17769,13 @@ - hardcode_direct_absolute_FC=yes - hardcode_libdir_separator_FC=':' - link_all_deplibs_FC=yes -- file_list_spec_FC='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # traditional, no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. 
-- hardcode_direct_FC=no -- hardcode_direct_absolute_FC=no -- ;; -- esac -+ file_list_spec_FC='${wl}-f,' - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -18532,42 +17794,35 @@ - ;; - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag="$shared_flag "'$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- export_dynamic_flag_spec_FC='$wl-bexpall' -+ export_dynamic_flag_spec_FC='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols_FC=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag_FC='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
-- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath__FC+:} false; then : -@@ -18596,7 +17851,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__FC"; then -- lt_cv_aix_libpath__FC=/usr/lib:/lib -+ lt_cv_aix_libpath__FC="/usr/lib:/lib" - fi - - fi -@@ -18604,17 +17859,17 @@ - aix_libpath=$lt_cv_aix_libpath__FC - fi - -- hardcode_libdir_flag_spec_FC='$wl-blibpath:$libdir:'"$aix_libpath" -- archive_expsym_cmds_FC='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath" -+ archive_expsym_cmds_FC='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- hardcode_libdir_flag_spec_FC='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ hardcode_libdir_flag_spec_FC='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag_FC="-z nodefs" -- archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. -- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath__FC+:} false; then : -@@ -18643,7 +17898,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__FC"; then -- lt_cv_aix_libpath__FC=/usr/lib:/lib -+ lt_cv_aix_libpath__FC="/usr/lib:/lib" - fi - - fi -@@ -18651,33 +17906,21 @@ - aix_libpath=$lt_cv_aix_libpath__FC - fi - -- hardcode_libdir_flag_spec_FC='$wl-blibpath:$libdir:'"$aix_libpath" -+ hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- no_undefined_flag_FC=' $wl-bernotok' -- allow_undefined_flag_FC=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ no_undefined_flag_FC=' ${wl}-bernotok' -+ allow_undefined_flag_FC=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. 
-- whole_archive_flag_spec_FC='$wl--whole-archive$convenience $wl--no-whole-archive' -+ whole_archive_flag_spec_FC='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec_FC='$convenience' - fi - archive_cmds_need_lc_FC=yes -- archive_expsym_cmds_FC='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared libraries. -- archive_expsym_cmds_FC="$archive_expsym_cmds_FC"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- archive_expsym_cmds_FC="$archive_expsym_cmds_FC"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- archive_expsym_cmds_FC="$archive_expsym_cmds_FC"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- archive_expsym_cmds_FC="$archive_expsym_cmds_FC"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared libraries. -+ archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -18686,7 +17929,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='' - ;; - m68k) -@@ -18716,17 +17959,16 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- archive_cmds_FC='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- archive_expsym_cmds_FC='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ archive_cmds_FC='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, FC)='true' - enable_shared_with_static_runtimes_FC=yes -@@ -18735,18 +17977,18 @@ - # Don't use ranlib - old_postinstall_cmds_FC='chmod 644 $oldlib' - postlink_cmds_FC='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # Assume MSVC wrapper -@@ -18755,7 +17997,7 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - archive_cmds_FC='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. 
-@@ -18774,24 +18016,24 @@ - hardcode_direct_FC=no - hardcode_automatic_FC=yes - hardcode_shlibpath_var_FC=unsupported -- if test yes = "$lt_cv_ld_force_load"; then -- whole_archive_flag_spec_FC='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' -+ if test "$lt_cv_ld_force_load" = "yes"; then -+ whole_archive_flag_spec_FC='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - compiler_needs_object_FC=yes - else - whole_archive_flag_spec_FC='' - fi - link_all_deplibs_FC=yes -- allow_undefined_flag_FC=$_lt_dar_allow_undefined -+ allow_undefined_flag_FC="$_lt_dar_allow_undefined" - case $cc_basename in -- ifort*|nagfor*) _lt_dar_can_shared=yes ;; -+ ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac -- if test yes = "$_lt_dar_can_shared"; then -+ if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all -- archive_cmds_FC="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" -- module_cmds_FC="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" -- archive_expsym_cmds_FC="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" -- module_expsym_cmds_FC="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" -+ archive_cmds_FC="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" -+ module_cmds_FC="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" -+ archive_expsym_cmds_FC="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" -+ module_expsym_cmds_FC="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs_FC=no -@@ -18833,33 +18075,33 @@ - ;; - - hpux9*) -- if test yes = "$GCC"; then -- archive_cmds_FC='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GCC" = yes; then -+ archive_cmds_FC='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else -- archive_cmds_FC='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ archive_cmds_FC='$RM $output_objdir/$soname~$LD -b 
+b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -- hardcode_libdir_flag_spec_FC='$wl+b $wl$libdir' -+ hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_direct_FC=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L_FC=yes -- export_dynamic_flag_spec_FC='$wl-E' -+ export_dynamic_flag_spec_FC='${wl}-E' - ;; - - hpux10*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -- archive_cmds_FC='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then -+ archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec_FC='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes -- export_dynamic_flag_spec_FC='$wl-E' -+ export_dynamic_flag_spec_FC='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L_FC=yes -@@ -18867,33 +18109,33 @@ - ;; - - hpux11*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) -- archive_cmds_FC='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds_FC='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds_FC='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) -- archive_cmds_FC='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds_FC='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds_FC='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec_FC='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - - case $host_cpu in -@@ -18904,7 +18146,7 @@ - *) - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes -- export_dynamic_flag_spec_FC='$wl-E' -+ export_dynamic_flag_spec_FC='${wl}-E' - - # hardcode_minus_L: Not 
really in the search PATH, - # but as the default location of the library. -@@ -18915,8 +18157,8 @@ - ;; - - irix5* | irix6* | nonstopux*) -- if test yes = "$GCC"; then -- archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -@@ -18926,8 +18168,8 @@ - if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 - else -- save_LDFLAGS=$LDFLAGS -- LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat > conftest.$ac_ext <<_ACEOF - - subroutine foo -@@ -18940,34 +18182,24 @@ - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 - $as_echo "$lt_cv_irix_exported_symbol" >&6; } -- if test yes = "$lt_cv_irix_exported_symbol"; then -- archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else -- archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' -+ archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc_FC='no' -- hardcode_libdir_flag_spec_FC='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - inherit_rpath_FC=yes - link_all_deplibs_FC=yes - ;; - -- linux*) -- case $cc_basename in -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- ld_shlibs_FC=yes -- archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- ;; -- 
esac -- ;; -- - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out -@@ -18982,7 +18214,7 @@ - newsos6) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=yes -- hardcode_libdir_flag_spec_FC='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_shlibpath_var_FC=no - ;; -@@ -18990,19 +18222,27 @@ - *nto* | *qnx*) - ;; - -- openbsd* | bitrig*) -+ openbsd*) - if test -f /usr/libexec/ld.so; then - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - hardcode_direct_absolute_FC=yes -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' -- hardcode_libdir_flag_spec_FC='$wl-rpath,$libdir' -- export_dynamic_flag_spec_FC='$wl-E' -+ archive_expsym_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec_FC='${wl}-E' - else -- archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- hardcode_libdir_flag_spec_FC='$wl-rpath,$libdir' -+ case $host_os in -+ openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) -+ archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' -+ hardcode_libdir_flag_spec_FC='-R$libdir' -+ ;; -+ *) -+ archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' -+ ;; -+ esac - fi - else - ld_shlibs_FC=no -@@ -19013,53 +18253,33 @@ - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_minus_L_FC=yes - allow_undefined_flag_FC=unsupported -- shrext_cmds=.dll -- archive_cmds_FC='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds_FC='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- old_archive_From_new_cmds_FC='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- enable_shared_with_static_runtimes_FC=yes -+ archive_cmds_FC='$ECHO "LIBRARY $libname 
INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' -+ old_archive_from_new_cmds_FC='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) -- if test yes = "$GCC"; then -- allow_undefined_flag_FC=' $wl-expect_unresolved $wl\*' -- archive_cmds_FC='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag_FC=' -expect_unresolved \*' -- archive_cmds_FC='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc_FC='no' -- hardcode_libdir_flag_spec_FC='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag -- if test yes = "$GCC"; then -- allow_undefined_flag_FC=' $wl-expect_unresolved $wl\*' -- archive_cmds_FC='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -- hardcode_libdir_flag_spec_FC='$wl-rpath $wl$libdir' -+ if test "$GCC" = yes; then -+ allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds_FC='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag_FC=' -expect_unresolved \*' -- archive_cmds_FC='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds_FC='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname 
`test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' -+ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec_FC='-rpath $libdir' -@@ -19070,24 +18290,24 @@ - - solaris*) - no_undefined_flag_FC=' -z defs' -- if test yes = "$GCC"; then -- wlarc='$wl' -- archive_cmds_FC='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ wlarc='${wl}' -+ archive_cmds_FC='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' -- archive_cmds_FC='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' -+ archive_cmds_FC='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' -+ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) -- wlarc='$wl' -- archive_cmds_FC='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ wlarc='${wl}' -+ archive_cmds_FC='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi -@@ -19097,11 +18317,11 @@ - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. GCC discards it without '$wl', -+ # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) -- if test yes = "$GCC"; then -- whole_archive_flag_spec_FC='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ if test "$GCC" = yes; then -+ whole_archive_flag_spec_FC='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec_FC='-z allextract$convenience -z defaultextract' - fi -@@ -19111,10 +18331,10 @@ - ;; - - sunos4*) -- if test sequent = "$host_vendor"; then -+ if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. 
-- archive_cmds_FC='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -19163,43 +18383,43 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) -- no_undefined_flag_FC='$wl-z,text' -+ no_undefined_flag_FC='${wl}-z,text' - archive_cmds_need_lc_FC=no - hardcode_shlibpath_var_FC=no - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds_FC='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_FC='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds_FC='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_FC='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- no_undefined_flag_FC='$wl-z,text' -- allow_undefined_flag_FC='$wl-z,nodefs' -+ no_undefined_flag_FC='${wl}-z,text' -+ allow_undefined_flag_FC='${wl}-z,nodefs' - archive_cmds_need_lc_FC=no - hardcode_shlibpath_var_FC=no -- hardcode_libdir_flag_spec_FC='$wl-R,$libdir' -+ hardcode_libdir_flag_spec_FC='${wl}-R,$libdir' - hardcode_libdir_separator_FC=':' - link_all_deplibs_FC=yes -- export_dynamic_flag_spec_FC='$wl-Bexport' -+ export_dynamic_flag_spec_FC='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds_FC='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_FC='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds_FC='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds_FC='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - -@@ -19214,10 +18434,10 @@ - ;; - esac - -- if test sni = "$host_vendor"; then -+ if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) -- export_dynamic_flag_spec_FC='$wl-Blargedynsym' -+ export_dynamic_flag_spec_FC='${wl}-Blargedynsym' - ;; - esac - fi -@@ -19225,7 +18445,7 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_FC" >&5 - $as_echo "$ld_shlibs_FC" >&6; } --test no = "$ld_shlibs_FC" && can_build_shared=no -+test "$ld_shlibs_FC" = no && can_build_shared=no - - with_gnu_ld_FC=$with_gnu_ld - -@@ -19242,7 +18462,7 @@ - # Assume -lc should be added - archive_cmds_need_lc_FC=yes - -- if test yes,yes = "$GCC,$enable_shared"; then -+ if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds_FC in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. -@@ -19370,7 +18590,7 @@ - library_names_spec= - libname_spec='lib$name' - soname_spec= --shrext_cmds=.so -+shrext_cmds=".so" - postinstall_cmds= - postuninstall_cmds= - finish_cmds= -@@ -19387,16 +18607,14 @@ - # flags to be left without arguments - need_version=unknown - -- -- - case $host_os in - aix3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname.a' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. 
-- soname_spec='$libname$release$shared_ext$major' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - - aix[4-9]*) -@@ -19404,91 +18622,41 @@ - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 -- library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with -- # the line '#! .'. This would cause the generated library to -- # depend on '.', always an invalid library. This was fixed in -+ # the line `#! .'. This would cause the generated library to -+ # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. - case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' -- echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then -+ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac -- # Using Import Files as archive members, it is possible to support -- # filename-based versioning of shared library archives on AIX. While -- # this would work for both with and without runtime linking, it will -- # prevent static linking of such archives. So we do filename-based -- # shared library versioning with .so extension only, which is used -- # when both runtime linking and shared linking is enabled. -- # Unfortunately, runtime linking may impact performance, so we do -- # not want this to be the default eventually. Also, we use the -- # versioned .so libs for executables only if there is the -brtl -- # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. -- # To allow for filename-based versioning support, we need to create -- # libNAME.so.V as an archive file, containing: -- # *) an Import File, referring to the versioned filename of the -- # archive as well as the shared archive member, telling the -- # bitwidth (32 or 64) of that shared object, and providing the -- # list of exported symbols of that shared object, eventually -- # decorated with the 'weak' keyword -- # *) the shared object with the F_LOADONLY flag set, to really avoid -- # it being seen by the linker. -- # At run time we better use the real file rather than another symlink, -- # but for link time we create the symlink libNAME.so -> libNAME.so.V -- -- case $with_aix_soname,$aix_use_runtimelinking in -- # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct -+ # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. -- aix,yes) # traditional libtool -- dynamic_linker='AIX unversionable lib.so' -+ if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib<name>.so - # instead of lib<name>.a to let people know that these are not - # typical AIX shared libraries. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- ;; -- aix,no) # traditional AIX only -- dynamic_linker='AIX lib.a(lib.so.V)' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- ;; -- svr4,*) # full svr4 only -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,yes) # both, prefer svr4 -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # unpreferred sharedlib libNAME.a needs extra handling -- postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' -- postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,no) # both, prefer aix -- dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling -- postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' -- postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' -- ;; -- esac -+ library_names_spec='${libname}${release}.a $libname.a' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ fi - shlibpath_var=LIBPATH - fi - ;; -@@ -19498,18 +18666,18 @@ - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
-- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' -+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - - beos*) -- library_names_spec='$libname$shared_ext' -+ library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; -@@ -19517,8 +18685,8 @@ - bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" -@@ -19530,7 +18698,7 @@ - - cygwin* | mingw* | pw32* | cegcc*) - version_type=windows -- shrext_cmds=.dll -+ shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - -@@ -19539,8 +18707,8 @@ - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ -@@ -19556,16 +18724,16 @@ - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' -- soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' -- library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' -@@ -19574,8 +18742,8 @@ - *,cl*) - # Native MSVC - libname_spec='$name' -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -- library_names_spec='$libname.dll.lib' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) -@@ -19602,7 +18770,7 @@ - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) -- sys_lib_search_path_spec=$LIB -+ sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -@@ -19615,8 +18783,8 @@ - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' -@@ -19629,7 +18797,7 @@ - - *) - # Assume MSVC wrapper -- library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' -+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac -@@ -19642,8 +18810,8 @@ - version_type=darwin - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$major$shared_ext' -+ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' -+ soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' -@@ -19655,8 +18823,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -19674,13 +18842,12 @@ - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac -@@ -19705,15 +18872,26 @@ - esac - ;; - -+gnu*) -+ version_type=linux # correct to gnu/linux during the next big refactor -+ need_lib_prefix=no -+ need_version=no -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ shlibpath_var=LD_LIBRARY_PATH -+ shlibpath_overrides_runpath=no -+ hardcode_into_libs=yes -+ ;; -+ - haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH -- shlibpath_overrides_runpath=no -+ shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; -@@ -19731,15 +18909,14 @@ - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -- if test 32 = "$HPUX_IA64_MODE"; then -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux32 - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux64 - fi -+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' -@@ -19747,8 +18924,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; -@@ -19757,8 +18934,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
-@@ -19771,8 +18948,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -19783,7 +18960,7 @@ - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix -@@ -19791,8 +18968,8 @@ - esac - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= -@@ -19811,8 +18988,8 @@ - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no -- sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" -- sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" -+ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" -+ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -@@ -19821,33 +18998,13 @@ - dynamic_linker=no - ;; - --linux*android*) -- version_type=none # Android doesn't support versioned libraries. -- need_lib_prefix=no -- need_version=no -- library_names_spec='$libname$release$shared_ext' -- soname_spec='$libname$release$shared_ext' -- finish_cmds= -- shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -- -- # This implies no fast_install, which is unacceptable. -- # Some rework will be needed to allow for fast_install -- # before this can be enabled. -- hardcode_into_libs=yes -- -- dynamic_linker='Android linker' -- # Don't embed -rpath directories since the linker doesn't support them. -- hardcode_libdir_flag_spec_FC='-L$libdir' -- ;; -- - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -19885,15 +19042,14 @@ - # before this can be enabled. - hardcode_into_libs=yes - -- # Ideally, we could use ldconfig to report *all* directores which are -- # searched for libraries, however this is still not possible. 
Aside from not -- # being certain /sbin/ldconfig is available, command -- # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, -- # even though it is searched at run-time. Try to do the best guess by -- # appending ld.so.conf contents (and includes) to the search path. -+ # Add ABI-specific directories to the system library path. -+ sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" -+ -+ # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` -- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" -+ sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" -+ - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on -@@ -19910,12 +19066,12 @@ - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH -@@ -19925,7 +19081,7 @@ - - newsos6) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; -@@ -19934,68 +19090,58 @@ - version_type=qnx - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - --openbsd* | bitrig*) -+openbsd*) - version_type=sunos -- sys_lib_dlsearch_path_spec=/usr/lib -+ sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -- need_version=no -- else -- need_version=yes -- fi -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
-+ case $host_os in -+ openbsd3.3 | openbsd3.3.*) need_version=yes ;; -+ *) need_version=no ;; -+ esac -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ case $host_os in -+ openbsd2.[89] | openbsd2.[89].*) -+ shlibpath_overrides_runpath=no -+ ;; -+ *) -+ shlibpath_overrides_runpath=yes -+ ;; -+ esac -+ else -+ shlibpath_overrides_runpath=yes -+ fi - ;; - - os2*) - libname_spec='$name' -- version_type=windows -- shrext_cmds=.dll -- need_version=no -+ shrext_cmds=".dll" - need_lib_prefix=no -- # OS/2 can only load a DLL with a base name of 8 characters or less. -- soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; -- v=$($ECHO $release$versuffix | tr -d .-); -- n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); -- $ECHO $n$v`$shared_ext' -- library_names_spec='${libname}_dll.$libext' -+ library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' -- shlibpath_var=BEGINLIBPATH -- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ -- dldir=$destdir/`dirname \$dlpath`~ -- test -d \$dldir || mkdir -p \$dldir~ -- $install_prog $dir/$dlname \$dldir/$dlname~ -- chmod a+x \$dldir/$dlname~ -- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then -- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; -- fi' -- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ -- dlpath=$dir/\$dldll~ -- $RM \$dlpath' -+ shlibpath_var=LIBPATH - ;; - - osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -+ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - - rdos*) -@@ -20006,8 +19152,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -@@ -20017,11 +19163,11 @@ - - sunos4*) - version_type=sunos -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes -@@ -20029,8 +19175,8 @@ - - sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) -@@ -20051,24 +19197,24 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' -- soname_spec='$libname$shared_ext.$major' -+ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' -+ soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) -- version_type=sco -+ version_type=freebsd-elf - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -- if test yes = 
"$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' -@@ -20086,7 +19232,7 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes -@@ -20094,8 +19240,8 @@ - - uts4*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -20105,32 +19251,20 @@ - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 - $as_echo "$dynamic_linker" >&6; } --test no = "$dynamic_linker" && can_build_shared=no -+test "$dynamic_linker" = no && can_build_shared=no - - variables_saved_for_relink="PATH $shlibpath_var $runpath_var" --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" - fi - --if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then -- sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec -+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then -+ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" - fi -- --if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then -- sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec -+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then -+ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" - fi - --# remember unaugmented sys_lib_dlsearch_path content for libtool script decls... --configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec -- --# ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code --func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" -- --# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool --configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH -- -- -- - - - -@@ -20173,15 +19307,15 @@ - hardcode_action_FC= - if test -n "$hardcode_libdir_flag_spec_FC" || - test -n "$runpath_var_FC" || -- test yes = "$hardcode_automatic_FC"; then -+ test "X$hardcode_automatic_FC" = "Xyes" ; then - - # We can hardcode non-existent directories. -- if test no != "$hardcode_direct_FC" && -+ if test "$hardcode_direct_FC" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one -- ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, FC)" && -- test no != "$hardcode_minus_L_FC"; then -+ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, FC)" != no && -+ test "$hardcode_minus_L_FC" != no; then - # Linking always hardcodes the temporary library directory. 
- hardcode_action_FC=relink - else -@@ -20196,12 +19330,12 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_FC" >&5 - $as_echo "$hardcode_action_FC" >&6; } - --if test relink = "$hardcode_action_FC" || -- test yes = "$inherit_rpath_FC"; then -+if test "$hardcode_action_FC" = relink || -+ test "$inherit_rpath_FC" = yes; then - # Fast installation is not supported - enable_fast_install=no --elif test yes = "$shlibpath_overrides_runpath" || -- test no = "$enable_shared"; then -+elif test "$shlibpath_overrides_runpath" = yes || -+ test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless - fi -@@ -20217,7 +19351,7 @@ - GCC=$lt_save_GCC - CC=$lt_save_CC - CFLAGS=$lt_save_CFLAGS --fi # test yes != "$_lt_disable_FC" -+fi # test "$_lt_disable_FC" != yes - - ac_ext=c - ac_cpp='$CPP $CPPFLAGS' -@@ -22168,6 +21302,31 @@ - done - fi - -+# -+# IME specific build path. -+# Automatically adds IME to the file systems if not present -+# -+if test -n "${with_ime}" ; then -+ IME_INSTALL_PATH="${with_ime}" -+ -+fi -+ -+ime_default_path="/opt/ddn/ime" -+if test -n "${with_ime}" -o -e "${ime_default_path}/include/ime_native.h"; then -+ # Use IME and the default path, if not specified or overriden -+ with_ime="${ime_default_path}" -+ file_system_ime=1 -+fi -+ -+if test -n "${file_system_ime}" ; then -+ CFLAGS="$CFLAGS -I${with_ime}/include" -+ CPPFLAGS="$CPPFLAGS -I${with_ime}/include" -+ LDFLAGS="$LDFLAGS -L${with_ime}/lib" -+ LIBS="-lim_client -lim_common" -+ export LD_LIBRARY_PATH="${with_ime}/lib:$LD_LIBRARY_PATH" -+fi -+ -+ - ############################################# - # This PVFS2 logic is special because it's hard to get it right if it comes - # before the known_filesystems check loop above. So we handle it down here, -@@ -25947,7 +25106,6 @@ - enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' - pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' - enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' --shared_archive_member_spec='`$ECHO "$shared_archive_member_spec" | $SED "$delay_single_quote_subst"`' - SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' - ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' - PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' -@@ -25997,13 +25155,10 @@ - GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' --lt_cv_sys_global_symbol_to_import='`$ECHO "$lt_cv_sys_global_symbol_to_import" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' --lt_cv_nm_interface='`$ECHO "$lt_cv_nm_interface" | $SED "$delay_single_quote_subst"`' - nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' - lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' --lt_cv_truncate_bin='`$ECHO "$lt_cv_truncate_bin" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' 
- lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' -@@ -26068,8 +25223,7 @@ - finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' - hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' - sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' --configure_time_dlsearch_path='`$ECHO "$configure_time_dlsearch_path" | $SED "$delay_single_quote_subst"`' --configure_time_lt_sys_library_path='`$ECHO "$configure_time_lt_sys_library_path" | $SED "$delay_single_quote_subst"`' -+sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`' - hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' - enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' - enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' -@@ -26222,12 +25376,9 @@ - compiler \ - lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ --lt_cv_sys_global_symbol_to_import \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ --lt_cv_nm_interface \ - nm_file_list_spec \ --lt_cv_truncate_bin \ - lt_prog_compiler_no_builtin_flag \ - lt_prog_compiler_pic \ - lt_prog_compiler_wl \ -@@ -26318,7 +25469,7 @@ - compiler_lib_search_path_FC; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -26345,8 +25496,7 @@ - postuninstall_cmds \ - finish_cmds \ - sys_lib_search_path_spec \ --configure_time_dlsearch_path \ --configure_time_lt_sys_library_path \ -+sys_lib_dlsearch_path_spec \ - reload_cmds_F77 \ - reload_cmds_FC \ - old_archive_cmds_F77 \ -@@ -26371,7 +25521,7 @@ - postlink_cmds_FC; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -26380,16 +25530,19 @@ - done - - ac_aux_dir='$ac_aux_dir' -+xsi_shell='$xsi_shell' -+lt_shell_append='$lt_shell_append' - --# See if we are running on zsh, and set the options that allow our -+# See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes INIT. --if test -n "\${ZSH_VERSION+set}"; then -+if test -n "\${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - - - PACKAGE='$PACKAGE' - VERSION='$VERSION' -+ TIMESTAMP='$TIMESTAMP' - RM='$RM' - ofile='$ofile' - -@@ -27122,52 +26275,55 @@ - ;; - "libtool":C) - -- # See if we are running on zsh, and set the options that allow our -+ # See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes. 
-- if test -n "${ZSH_VERSION+set}"; then -+ if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -- cfgfile=${ofile}T -+ cfgfile="${ofile}T" - trap "$RM \"$cfgfile\"; exit 1" 1 2 15 - $RM "$cfgfile" - - cat <<_LT_EOF >> "$cfgfile" - #! $SHELL --# Generated automatically by $as_me ($PACKAGE) $VERSION --# NOTE: Changes made to this file will be lost: look at ltmain.sh. -- --# Provide generalized library-building support services. --# Written by Gordon Matzigkeit, 1996 - --# Copyright (C) 2014 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# GNU Libtool is free software; you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation; either version 2 of of the License, or --# (at your option) any later version. --# --# As a special exception to the GNU General Public License, if you --# distribute this file as part of a program or library that is built --# using GNU Libtool, you may include this file under the same --# distribution terms that you use for the rest of that program. -+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. -+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION -+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: -+# NOTE: Changes made to this file will be lost: look at ltmain.sh. -+# -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -+# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -+# Foundation, Inc. -+# Written by Gordon Matzigkeit, 1996 -+# -+# This file is part of GNU Libtool. -+# -+# GNU Libtool is free software; you can redistribute it and/or -+# modify it under the terms of the GNU General Public License as -+# published by the Free Software Foundation; either version 2 of -+# the License, or (at your option) any later version. -+# -+# As a special exception to the GNU General Public License, -+# if you distribute this file as part of a program or library that -+# is built using GNU Libtool, you may include this file under the -+# same distribution terms that you use for the rest of that program. - # --# GNU Libtool is distributed in the hope that it will be useful, but --# WITHOUT ANY WARRANTY; without even the implied warranty of -+# GNU Libtool is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of - # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - # GNU General Public License for more details. - # - # You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -+# along with GNU Libtool; see the file COPYING. If not, a copy -+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or -+# obtained by writing to the Free Software Foundation, Inc., -+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - - - # The names of the tagged configurations supported by this script. --available_tags='F77 FC ' -- --# Configured defaults for sys_lib_dlsearch_path munging. --: \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} -+available_tags="F77 FC " - - # ### BEGIN LIBTOOL CONFIG - -@@ -27187,9 +26343,6 @@ - # Whether or not to optimize for fast installation. 
- fast_install=$enable_fast_install - --# Shared archive member basename,for filename based shared library versioning on AIX. --shared_archive_member_spec=$shared_archive_member_spec -- - # Shell to use when invoking shell scripts. - SHELL=$lt_SHELL - -@@ -27307,27 +26460,18 @@ - # Transform the output of nm in a proper C declaration. - global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl - --# Transform the output of nm into a list of symbols to manually relocate. --global_symbol_to_import=$lt_lt_cv_sys_global_symbol_to_import -- - # Transform the output of nm in a C name address pair. - global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - --# The name lister interface. --nm_interface=$lt_lt_cv_nm_interface -- - # Specify filename containing input files for \$NM. - nm_file_list_spec=$lt_nm_file_list_spec - --# The root where to search for dependent libraries,and where our libraries should be installed. -+# The root where to search for dependent libraries,and in which our libraries should be installed. - lt_sysroot=$lt_sysroot - --# Command to truncate a binary pipe. --lt_truncate_bin=$lt_lt_cv_truncate_bin -- - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -27418,11 +26562,8 @@ - # Compile-time system search path for libraries. - sys_lib_search_path_spec=$lt_sys_lib_search_path_spec - --# Detected run-time system search path for libraries. --sys_lib_dlsearch_path_spec=$lt_configure_time_dlsearch_path -- --# Explicit LT_SYS_LIBRARY_PATH set during ./configure time. --configure_time_lt_sys_library_path=$lt_configure_time_lt_sys_library_path -+# Run-time system search path for libraries. -+sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec - - # Whether dlopen is supported. - dlopen_support=$enable_dlopen -@@ -27515,13 +26656,13 @@ - # Whether we need a single "-rpath" flag with a separated argument. - hardcode_libdir_separator=$lt_hardcode_libdir_separator - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary. - hardcode_direct=$hardcode_direct - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary and the resulting library dependency is --# "absolute",i.e impossible to change by setting \$shlibpath_var if the -+# "absolute",i.e impossible to change by setting \${shlibpath_var} if the - # library is relocated. 
- hardcode_direct_absolute=$hardcode_direct_absolute - -@@ -27587,72 +26728,13 @@ - - _LT_EOF - -- cat <<'_LT_EOF' >> "$cfgfile" -- --# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE -- --# func_munge_path_list VARIABLE PATH --# ----------------------------------- --# VARIABLE is name of variable containing _space_ separated list of --# directories to be munged by the contents of PATH, which is string --# having a format: --# "DIR[:DIR]:" --# string "DIR[ DIR]" will be prepended to VARIABLE --# ":DIR[:DIR]" --# string "DIR[ DIR]" will be appended to VARIABLE --# "DIRP[:DIRP]::[DIRA:]DIRA" --# string "DIRP[ DIRP]" will be prepended to VARIABLE and string --# "DIRA[ DIRA]" will be appended to VARIABLE --# "DIR[:DIR]" --# VARIABLE will be replaced by "DIR[ DIR]" --func_munge_path_list () --{ -- case x$2 in -- x) -- ;; -- *:) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" -- ;; -- x:*) -- eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- *::*) -- eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" -- eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" -- ;; -- *) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- esac --} -- -- --# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. --func_cc_basename () --{ -- for cc_temp in $*""; do -- case $cc_temp in -- compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -- distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -- \-*) ;; -- *) break;; -- esac -- done -- func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` --} -- -- --# ### END FUNCTIONS SHARED WITH CONFIGURE -- --_LT_EOF -- - case $host_os in - aix3*) - cat <<\_LT_EOF >> "$cfgfile" - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. --if test set != "${COLLECT_NAMES+set}"; then -+if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -27661,7 +26743,7 @@ - esac - - --ltmain=$ac_aux_dir/ltmain.sh -+ltmain="$ac_aux_dir/ltmain.sh" - - - # We use sed instead of cat because bash on DJGPP gets confused if -@@ -27671,6 +26753,165 @@ - sed '$q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ - mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" -@@ -27757,13 +26998,13 @@ - # Whether we need a single "-rpath" flag with a separated argument. - hardcode_libdir_separator=$lt_hardcode_libdir_separator_F77 - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary. - hardcode_direct=$hardcode_direct_F77 - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary and the resulting library dependency is --# "absolute",i.e impossible to change by setting \$shlibpath_var if the -+# "absolute",i.e impossible to change by setting \${shlibpath_var} if the - # library is relocated. - hardcode_direct_absolute=$hardcode_direct_absolute_F77 - -@@ -27910,13 +27151,13 @@ - # Whether we need a single "-rpath" flag with a separated argument. - hardcode_libdir_separator=$lt_hardcode_libdir_separator_FC - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary. 
- hardcode_direct=$hardcode_direct_FC - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary and the resulting library dependency is --# "absolute",i.e impossible to change by setting \$shlibpath_var if the -+# "absolute",i.e impossible to change by setting \${shlibpath_var} if the - # library is relocated. - hardcode_direct_absolute=$hardcode_direct_absolute_FC - -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/configure.ac psmpi-5.4.6-1/mpich2/src/mpi/romio/configure.ac ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/configure.ac 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/configure.ac 2020-07-16 10:45:36.787618827 +0200 -@@ -226,6 +226,8 @@ - --with-pvfs2=path - Path to installation of PVFS (version 2)],,) - AC_ARG_WITH(mpi-impl,[ - --with-mpi-impl=name - Specify MPI implementation to build ROMIO for],,) -+AC_ARG_WITH(ime,[ -+--with-ime=PATH - Path to installation of IME],,) - dnl - AC_ARG_WITH(mpi, [ - --with-mpi=path - Path to instalation of MPI (headers, libs, etc)],,) -@@ -776,6 +778,30 @@ - done - fi - -+# -+# IME specific build path. -+# Automatically adds IME to the file systems if not present -+# -+if test -n "${with_ime}" ; then -+ AC_SUBST(IME_INSTALL_PATH, ["${with_ime}"]) -+fi -+ -+ime_default_path="/opt/ddn/ime" -+if test -n "${with_ime}" -o -e "${ime_default_path}/include/ime_native.h"; then -+ # Use IME and the default path, if not specified or overriden -+ with_ime="${ime_default_path}" -+ file_system_ime=1 -+fi -+ -+if test -n "${file_system_ime}" ; then -+ CFLAGS="$CFLAGS -I${with_ime}/include" -+ CPPFLAGS="$CPPFLAGS -I${with_ime}/include" -+ LDFLAGS="$LDFLAGS -L${with_ime}/lib" -+ LIBS="-lim_client -lim_common" -+ export LD_LIBRARY_PATH="${with_ime}/lib:$LD_LIBRARY_PATH" -+fi -+ -+ - ############################################# - # This PVFS2 logic is special because it's hard to get it right if it comes - # before the known_filesystems check loop above. So we handle it down here, -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/Makefile.in psmpi-5.4.6-1/mpich2/src/mpi/romio/Makefile.in ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/Makefile.in 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/Makefile.in 2020-07-16 10:48:35.305062584 +0200 -@@ -21,17 +21,7 @@ - # - - --am__is_gnu_make = { \ -- if test -z '$(MAKELEVEL)'; then \ -- false; \ -- elif test -n '$(MAKE_HOST)'; then \ -- true; \ -- elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ -- true; \ -- else \ -- false; \ -- fi; \ --} -+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' - am__make_running_with_option = \ - case $${target_option-} in \ - ?) 
;; \ -@@ -94,6 +84,44 @@ - POST_UNINSTALL = : - build_triplet = @build@ - host_triplet = @host@ -+DIST_COMMON = $(srcdir)/mpi-io/Makefile.mk \ -+ $(top_srcdir)/mpi-io/glue/Makefile.mk \ -+ $(top_srcdir)/mpi-io/glue/default/Makefile.mk \ -+ $(top_srcdir)/mpi-io/glue/mpich/Makefile.mk \ -+ $(top_srcdir)/mpi-io/fortran/Makefile.mk \ -+ $(srcdir)/adio/Makefile.mk \ -+ $(top_srcdir)/adio/ad_gpfs/Makefile.mk \ -+ $(top_srcdir)/adio/ad_gpfs/bg/Makefile.mk \ -+ $(top_srcdir)/adio/ad_gpfs/pe/Makefile.mk \ -+ $(top_srcdir)/adio/ad_lustre/Makefile.mk \ -+ $(top_srcdir)/adio/ad_nfs/Makefile.mk \ -+ $(top_srcdir)/adio/ad_panfs/Makefile.mk \ -+ $(top_srcdir)/adio/ad_pvfs2/Makefile.mk \ -+ $(top_srcdir)/adio/ad_testfs/Makefile.mk \ -+ $(top_srcdir)/adio/ad_ufs/Makefile.mk \ -+ $(top_srcdir)/adio/ad_xfs/Makefile.mk \ -+ $(top_srcdir)/adio/ad_ime/Makefile.mk \ -+ $(top_srcdir)/adio/common/Makefile.mk $(srcdir)/Makefile.in \ -+ $(srcdir)/Makefile.am $(top_srcdir)/configure \ -+ $(am__configure_deps) \ -+ $(top_srcdir)/adio/include/romioconf.h.in \ -+ $(srcdir)/localdefs.in \ -+ $(top_srcdir)/mpi2-other/info/Makefile.in \ -+ $(top_srcdir)/mpi2-other/array/Makefile.in \ -+ $(top_srcdir)/util/romioinstall.in \ -+ $(top_srcdir)/include/mpio.h.in \ -+ $(top_srcdir)/include/mpiof.h.in \ -+ $(top_srcdir)/mpi2-other/info/fortran/Makefile.in \ -+ $(top_srcdir)/mpi2-other/array/fortran/Makefile.in \ -+ $(top_srcdir)/confdb/depcomp $(include_HEADERS) \ -+ $(am__noinst_HEADERS_DIST) README confdb/ar-lib confdb/compile \ -+ confdb/config.guess confdb/config.rpath confdb/config.sub \ -+ confdb/depcomp confdb/install-sh confdb/missing \ -+ confdb/ltmain.sh $(top_srcdir)/confdb/ar-lib \ -+ $(top_srcdir)/confdb/compile $(top_srcdir)/confdb/config.guess \ -+ $(top_srcdir)/confdb/config.sub \ -+ $(top_srcdir)/confdb/install-sh $(top_srcdir)/confdb/ltmain.sh \ -+ $(top_srcdir)/confdb/missing - @MPIO_GLUE_DEFAULT_TRUE@am__append_1 = \ - @MPIO_GLUE_DEFAULT_TRUE@ mpi-io/glue/default/mpio_file.c \ - @MPIO_GLUE_DEFAULT_TRUE@ mpi-io/glue/default/mpio_err.c -@@ -329,9 +357,6 @@ - $(top_srcdir)/configure.ac - am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) --DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \ -- $(am__configure_deps) $(include_HEADERS) \ -- $(am__noinst_HEADERS_DIST) $(am__DIST_COMMON) - am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ - configure.lineno config.status.lineno - mkinstalldirs = $(install_sh) -d -@@ -1403,39 +1428,6 @@ - ETAGS = etags - CTAGS = ctags - CSCOPE = cscope --am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/adio/Makefile.mk \ -- $(srcdir)/localdefs.in $(srcdir)/mpi-io/Makefile.mk \ -- $(top_srcdir)/adio/ad_gpfs/Makefile.mk \ -- $(top_srcdir)/adio/ad_gpfs/bg/Makefile.mk \ -- $(top_srcdir)/adio/ad_gpfs/pe/Makefile.mk \ -- $(top_srcdir)/adio/ad_ime/Makefile.mk \ -- $(top_srcdir)/adio/ad_lustre/Makefile.mk \ -- $(top_srcdir)/adio/ad_nfs/Makefile.mk \ -- $(top_srcdir)/adio/ad_panfs/Makefile.mk \ -- $(top_srcdir)/adio/ad_pvfs2/Makefile.mk \ -- $(top_srcdir)/adio/ad_testfs/Makefile.mk \ -- $(top_srcdir)/adio/ad_ufs/Makefile.mk \ -- $(top_srcdir)/adio/ad_xfs/Makefile.mk \ -- $(top_srcdir)/adio/common/Makefile.mk \ -- $(top_srcdir)/adio/include/romioconf.h.in \ -- $(top_srcdir)/confdb/ar-lib $(top_srcdir)/confdb/compile \ -- $(top_srcdir)/confdb/config.guess \ -- $(top_srcdir)/confdb/config.sub $(top_srcdir)/confdb/depcomp \ -- $(top_srcdir)/confdb/install-sh $(top_srcdir)/confdb/ltmain.sh \ -- $(top_srcdir)/confdb/missing 
$(top_srcdir)/include/mpio.h.in \ -- $(top_srcdir)/include/mpiof.h.in \ -- $(top_srcdir)/mpi-io/fortran/Makefile.mk \ -- $(top_srcdir)/mpi-io/glue/Makefile.mk \ -- $(top_srcdir)/mpi-io/glue/default/Makefile.mk \ -- $(top_srcdir)/mpi-io/glue/mpich/Makefile.mk \ -- $(top_srcdir)/mpi2-other/array/Makefile.in \ -- $(top_srcdir)/mpi2-other/array/fortran/Makefile.in \ -- $(top_srcdir)/mpi2-other/info/Makefile.in \ -- $(top_srcdir)/mpi2-other/info/fortran/Makefile.in \ -- $(top_srcdir)/util/romioinstall.in README confdb/ar-lib \ -- confdb/compile confdb/config.guess confdb/config.rpath \ -- confdb/config.sub confdb/depcomp confdb/install-sh \ -- confdb/ltmain.sh confdb/missing - DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) - distdir = $(PACKAGE)-$(VERSION) - top_distdir = $(distdir) -@@ -1535,6 +1527,7 @@ - HAVE_MPI_DARRAY_SUBARRAY = @HAVE_MPI_DARRAY_SUBARRAY@ - HAVE_MPI_INFO = @HAVE_MPI_INFO@ - HAVE_WEAK_SYMBOLS = @HAVE_WEAK_SYMBOLS@ -+IME_INSTALL_PATH = @IME_INSTALL_PATH@ - INSTALL = @INSTALL@ - INSTALL_DATA = @INSTALL_DATA@ - INSTALL_PROGRAM = @INSTALL_PROGRAM@ -@@ -1550,7 +1543,6 @@ - LL = @LL@ - LN_S = @LN_S@ - LTLIBOBJS = @LTLIBOBJS@ --LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ - MAINT = @MAINT@ - MAKE = @MAKE@ - MAKEINFO = @MAKEINFO@ -@@ -1892,6 +1884,7 @@ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign Makefile -+.PRECIOUS: Makefile - Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' in \ - *config.status*) \ -@@ -1901,7 +1894,7 @@ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles);; \ - esac; --$(srcdir)/mpi-io/Makefile.mk $(top_srcdir)/mpi-io/glue/Makefile.mk $(top_srcdir)/mpi-io/glue/default/Makefile.mk $(top_srcdir)/mpi-io/glue/mpich/Makefile.mk $(top_srcdir)/mpi-io/fortran/Makefile.mk $(srcdir)/adio/Makefile.mk $(top_srcdir)/adio/ad_gpfs/Makefile.mk $(top_srcdir)/adio/ad_gpfs/bg/Makefile.mk $(top_srcdir)/adio/ad_gpfs/pe/Makefile.mk $(top_srcdir)/adio/ad_lustre/Makefile.mk $(top_srcdir)/adio/ad_nfs/Makefile.mk $(top_srcdir)/adio/ad_panfs/Makefile.mk $(top_srcdir)/adio/ad_pvfs2/Makefile.mk $(top_srcdir)/adio/ad_testfs/Makefile.mk $(top_srcdir)/adio/ad_ufs/Makefile.mk $(top_srcdir)/adio/ad_xfs/Makefile.mk $(top_srcdir)/adio/ad_ime/Makefile.mk $(top_srcdir)/adio/common/Makefile.mk $(am__empty): -+$(srcdir)/mpi-io/Makefile.mk $(top_srcdir)/mpi-io/glue/Makefile.mk $(top_srcdir)/mpi-io/glue/default/Makefile.mk $(top_srcdir)/mpi-io/glue/mpich/Makefile.mk $(top_srcdir)/mpi-io/fortran/Makefile.mk $(srcdir)/adio/Makefile.mk $(top_srcdir)/adio/ad_gpfs/Makefile.mk $(top_srcdir)/adio/ad_gpfs/bg/Makefile.mk $(top_srcdir)/adio/ad_gpfs/pe/Makefile.mk $(top_srcdir)/adio/ad_lustre/Makefile.mk $(top_srcdir)/adio/ad_nfs/Makefile.mk $(top_srcdir)/adio/ad_panfs/Makefile.mk $(top_srcdir)/adio/ad_pvfs2/Makefile.mk $(top_srcdir)/adio/ad_testfs/Makefile.mk $(top_srcdir)/adio/ad_ufs/Makefile.mk $(top_srcdir)/adio/ad_xfs/Makefile.mk $(top_srcdir)/adio/ad_ime/Makefile.mk $(top_srcdir)/adio/common/Makefile.mk: - - $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - $(SHELL) ./config.status --recheck -@@ -1913,8 +1906,8 @@ - $(am__aclocal_m4_deps): - - adio/include/romioconf.h: adio/include/stamp-h1 -- @test -f $@ || rm -f adio/include/stamp-h1 -- @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) adio/include/stamp-h1 -+ @if test ! 
-f $@; then rm -f adio/include/stamp-h1; else :; fi -+ @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) adio/include/stamp-h1; else :; fi - - adio/include/stamp-h1: $(top_srcdir)/adio/include/romioconf.h.in $(top_builddir)/config.status - @rm -f adio/include/stamp-h1 -@@ -4472,7 +4465,7 @@ - ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ - || chmod -R a+r "$(distdir)" - dist-gzip: distdir -- tardir=$(distdir) && $(am__tar) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).tar.gz -+ tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__post_remove_distdir) - - dist-bzip2: distdir -@@ -4488,17 +4481,11 @@ - $(am__post_remove_distdir) - - dist-tarZ: distdir -- @echo WARNING: "Support for distribution archives compressed with" \ -- "legacy program 'compress' is deprecated." >&2 -- @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 - tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z - $(am__post_remove_distdir) - - dist-shar: distdir -- @echo WARNING: "Support for shar distribution archives is" \ -- "deprecated." >&2 -- @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 -- shar $(distdir) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).shar.gz -+ shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz - $(am__post_remove_distdir) - - dist-zip: distdir -@@ -4516,7 +4503,7 @@ - distcheck: dist - case '$(DIST_ARCHIVES)' in \ - *.tar.gz*) \ -- eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).tar.gz | $(am__untar) ;;\ -+ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ - *.tar.bz2*) \ - bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ - *.tar.lz*) \ -@@ -4526,23 +4513,22 @@ - *.tar.Z*) \ - uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ - *.shar.gz*) \ -- eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\ -+ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ - *.zip*) \ - unzip $(distdir).zip ;;\ - esac - chmod -R a-w $(distdir) - chmod u+w $(distdir) -- mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst -+ mkdir $(distdir)/_build $(distdir)/_inst - chmod a-w $(distdir) - test -d $(distdir)/_build || exit 0; \ - dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ - && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ - && am__cwd=`pwd` \ -- && $(am__cd) $(distdir)/_build/sub \ -- && ../../configure \ -+ && $(am__cd) $(distdir)/_build \ -+ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ - $(AM_DISTCHECK_CONFIGURE_FLAGS) \ - $(DISTCHECK_CONFIGURE_FLAGS) \ -- --srcdir=../.. 
--prefix="$$dc_install_base" \ - && $(MAKE) $(AM_MAKEFLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) dvi \ - && $(MAKE) $(AM_MAKEFLAGS) check \ -@@ -5577,8 +5563,6 @@ - tags tags-am uninstall uninstall-am uninstall-includeHEADERS \ - uninstall-libLTLIBRARIES uninstall-nodist_includeHEADERS - --.PRECIOUS: Makefile -- - - # -------------------------------------------------------------------------- - .PHONY: coverage -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/aclocal.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/aclocal.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/aclocal.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/aclocal.m4 2020-07-16 10:48:35.433405496 +0200 -@@ -76,8 +76,7 @@ - : ${AR=ar} - - AC_CACHE_CHECK([the archiver ($AR) interface], [am_cv_ar_interface], -- [AC_LANG_PUSH([C]) -- am_cv_ar_interface=ar -+ [am_cv_ar_interface=ar - AC_COMPILE_IFELSE([AC_LANG_SOURCE([[int some_variable = 0;]])], - [am_ar_try='$AR cru libconftest.a conftest.$ac_objext >&AS_MESSAGE_LOG_FD' - AC_TRY_EVAL([am_ar_try]) -@@ -94,7 +93,7 @@ - fi - rm -f conftest.lib libconftest.a - ]) -- AC_LANG_POP([C])]) -+ ]) - - case $am_cv_ar_interface in - ar) -@@ -467,12 +466,6 @@ - # This macro actually does too much. Some checks are only needed if - # your package does certain things. But this isn't really a big deal. - --dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O. --m4_define([AC_PROG_CC], --m4_defn([AC_PROG_CC]) --[_AM_PROG_CC_C_O --]) -- - # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) - # AM_INIT_AUTOMAKE([OPTIONS]) - # ----------------------------------------------- -@@ -548,8 +541,8 @@ - # <https://lists.gnu.org/archive/html/automake/2012-07/msg00001.html> - # <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html> - AC_SUBST([mkdir_p], ['$(MKDIR_P)']) --# We need awk for the "check" target (and possibly the TAP driver). The --# system "awk" is bad on some platforms. -+# We need awk for the "check" target. The system "awk" is bad on -+# some platforms. - AC_REQUIRE([AC_PROG_AWK])dnl - AC_REQUIRE([AC_PROG_MAKE_SET])dnl - AC_REQUIRE([AM_SET_LEADING_DOT])dnl -@@ -634,6 +627,7 @@ - m4_define([_AC_COMPILER_EXEEXT], - m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) - -+ - # When config.status generates a header, we must update the stamp-h file. - # This file resides in the same directory as the config header - # that is generated. The stamp files are numbered to have different names. -@@ -666,7 +660,7 @@ - # Define $install_sh. - AC_DEFUN([AM_PROG_INSTALL_SH], - [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl --if test x"${install_sh+set}" != xset; then -+if test x"${install_sh}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/compile psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/compile ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/compile 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/compile 2020-07-16 10:48:35.436080000 +0200 -@@ -255,8 +255,7 @@ - echo "compile $scriptversion" - exit $? - ;; -- cl | *[/\\]cl | cl.exe | *[/\\]cl.exe | \ -- icl | *[/\\]icl | icl.exe | *[/\\]icl.exe ) -+ cl | *[/\\]cl | cl.exe | *[/\\]cl.exe ) - func_cl_wrapper "$@" # Doesn't return... 
- ;; - esac -@@ -343,6 +342,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/config.guess psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/config.guess ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/config.guess 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/config.guess 2020-07-16 10:48:35.440388622 +0200 -@@ -610,9 +610,8 @@ - else - IBM_ARCH=powerpc - fi -- if [ -x /usr/bin/lslpp ] ; then -- IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | -- awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` -+ if [ -x /usr/bin/oslevel ] ; then -+ IBM_REV=`/usr/bin/oslevel` - else - IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" - fi -@@ -711,12 +710,12 @@ - # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess - # => hppa64-hp-hpux11.23 - -- if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | -+ if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | - grep -q __LP64__ - then -- HP_ARCH=hppa2.0w -+ HP_ARCH="hppa2.0w" - else -- HP_ARCH=hppa64 -+ HP_ARCH="hppa64" - fi - fi - echo "$HP_ARCH"-hp-hpux"$HPUX_REV" -@@ -1125,7 +1124,7 @@ - # uname -m prints for DJGPP always 'pc', but it prints nothing about - # the processor, so we play safe by assuming i586. - # Note: whatever this is, it MUST be the same as what config.sub -- # prints for the "djgpp" host, or else GDB configure will decide that -+ # prints for the "djgpp" host, or else GDB configury will decide that - # this is a cross-build. - echo i586-pc-msdosdjgpp - exit ;; -Binary files psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/.config.guess.swp and psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/.config.guess.swp differ -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/config.sub psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/config.sub ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/config.sub 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/config.sub 2020-07-16 10:48:35.444218004 +0200 -@@ -25,7 +25,7 @@ - # of the GNU General Public License, version 3 ("GPLv3"). - - --# Please send patches to <config-patches@gnu.org>. -+# Please send patches with a ChangeLog entry to config-patches@gnu.org. - # - # Configuration subroutine to validate and canonicalize a configuration type. - # Supply the specified configuration type as an argument. -@@ -53,7 +53,8 @@ - me=`echo "$0" | sed -e 's,.*/,,'` - - usage="\ --Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS -+Usage: $0 [OPTION] CPU-MFR-OPSYS -+ $0 [OPTION] ALIAS - - Canonicalize a configuration name. 
- -@@ -116,8 +117,8 @@ - case $maybe_os in - nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ - linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ -- knetbsd*-gnu* | netbsd*-gnu* | netbsd*-eabi* | \ -- kopensolaris*-gnu* | cloudabi*-eabi* | \ -+ knetbsd*-gnu* | netbsd*-gnu* | \ -+ kopensolaris*-gnu* | \ - storm-chaos* | os2-emx* | rtmk-nova*) - os=-$maybe_os - basic_machine=`echo "$1" | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` -@@ -251,18 +252,16 @@ - | arc | arceb \ - | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ - | avr | avr32 \ -- | ba \ - | be32 | be64 \ - | bfin \ -- | c4x | c8051 | clipper \ -+ | c4x | clipper \ - | d10v | d30v | dlx | dsp16xx \ -- | e2k | epiphany \ -- | fido | fr30 | frv | ft32 \ -+ | epiphany \ -+ | fido | fr30 | frv \ - | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ - | hexagon \ -- | i370 | i860 | i960 | ia16 | ia64 \ -+ | i370 | i860 | i960 | ia64 \ - | ip2k | iq2000 \ -- | k1om \ - | le32 | le64 \ - | lm32 \ - | m32c | m32r | m32rle | m68000 | m68k | m88k \ -@@ -280,10 +279,8 @@ - | mips64vr5900 | mips64vr5900el \ - | mipsisa32 | mipsisa32el \ - | mipsisa32r2 | mipsisa32r2el \ -- | mipsisa32r6 | mipsisa32r6el \ - | mipsisa64 | mipsisa64el \ - | mipsisa64r2 | mipsisa64r2el \ -- | mipsisa64r6 | mipsisa64r6el \ - | mipsisa64sb1 | mipsisa64sb1el \ - | mipsisa64sr71k | mipsisa64sr71kel \ - | mipsr5900 | mipsr5900el \ -@@ -326,10 +323,7 @@ - c6x) - basic_machine=tic6x-unknown - ;; -- leon|leon[3-9]) -- basic_machine=sparc-$basic_machine -- ;; -- m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) -+ m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) - basic_machine=$basic_machine-unknown - os=-none - ;; -@@ -374,20 +368,18 @@ - | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ - | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ - | avr-* | avr32-* \ -- | ba-* \ - | be32-* | be64-* \ - | bfin-* | bs2000-* \ - | c[123]* | c30-* | [cjt]90-* | c4x-* \ -- | c8051-* | clipper-* | craynv-* | cydra-* \ -+ | clipper-* | craynv-* | cydra-* \ - | d10v-* | d30v-* | dlx-* \ -- | e2k-* | elxsi-* \ -+ | elxsi-* \ - | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ - | h8300-* | h8500-* \ - | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ - | hexagon-* \ -- | i*86-* | i860-* | i960-* | ia16-* | ia64-* \ -+ | i*86-* | i860-* | i960-* | ia64-* \ - | ip2k-* | iq2000-* \ -- | k1om-* \ - | le32-* | le64-* \ - | lm32-* \ - | m32c-* | m32r-* | m32rle-* \ -@@ -407,10 +399,8 @@ - | mips64vr5900-* | mips64vr5900el-* \ - | mipsisa32-* | mipsisa32el-* \ - | mipsisa32r2-* | mipsisa32r2el-* \ -- | mipsisa32r6-* | mipsisa32r6el-* \ - | mipsisa64-* | mipsisa64el-* \ - | mipsisa64r2-* | mipsisa64r2el-* \ -- | mipsisa64r6-* | mipsisa64r6el-* \ - | mipsisa64sb1-* | mipsisa64sb1el-* \ - | mipsisa64sr71k-* | mipsisa64sr71kel-* \ - | mipsr5900-* | mipsr5900el-* \ -@@ -422,19 +412,16 @@ - | nios-* | nios2-* | nios2eb-* | nios2el-* \ - | none-* | np1-* | ns16k-* | ns32k-* \ - | open8-* \ -- | or1k*-* \ - | orion-* \ - | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ - | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ -- | pru-* \ - | pyramid-* \ -- | riscv32-* | riscv64-* \ - | rl78-* | romp-* | rs6000-* | rx-* \ - | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ - | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ - | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* 
\ - | sparclite-* \ -- | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ -+ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \ - | tahoe-* \ - | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ - | tile*-* \ -@@ -442,8 +429,6 @@ - | ubicom32-* \ - | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ - | vax-* \ -- | visium-* \ -- | wasm32-* \ - | we32k-* \ - | x86-* | x86_64-* | xc16x-* | xps100-* \ - | xstormy16-* | xtensa*-* \ -@@ -520,9 +505,6 @@ - basic_machine=i386-pc - os=-aros - ;; -- asmjs) -- basic_machine=asmjs-unknown -- ;; - aux) - basic_machine=m68k-apple - os=-aux -@@ -816,7 +798,7 @@ - os=-mingw64 - ;; - mingw32) -- basic_machine=i686-pc -+ basic_machine=i386-pc - os=-mingw32 - ;; - mingw32ce) -@@ -844,10 +826,6 @@ - basic_machine=powerpc-unknown - os=-morphos - ;; -- moxiebox) -- basic_machine=moxie-unknown -- os=-moxiebox -- ;; - msdos) - basic_machine=i386-pc - os=-msdos -@@ -856,7 +834,7 @@ - basic_machine=`echo "$basic_machine" | sed -e 's/ms1-/mt-/'` - ;; - msys) -- basic_machine=i686-pc -+ basic_machine=i386-pc - os=-msys - ;; - mvs) -@@ -943,9 +921,6 @@ - nsv-tandem) - basic_machine=nsv-tandem - ;; -- nsx-tandem) -- basic_machine=nsx-tandem -- ;; - op50n-* | op60c-*) - basic_machine=hppa1.1-oki - os=-proelf -@@ -1030,7 +1005,7 @@ - ppc-* | ppcbe-*) - basic_machine=powerpc-`echo "$basic_machine" | sed 's/^[^-]*-//'` - ;; -- ppcle | powerpclittle) -+ ppcle | powerpclittle | ppc-le | powerpc-little) - basic_machine=powerpcle-unknown - ;; - ppcle-* | powerpclittle-*) -@@ -1520,8 +1495,6 @@ - ;; - -nacl*) - ;; -- -ios) -- ;; - -none) - ;; - *) -@@ -1562,9 +1535,6 @@ - c4x-* | tic4x-*) - os=-coff - ;; -- c8051-*) -- os=-elf -- ;; - hexagon-*) - os=-elf - ;; -@@ -1608,6 +1578,9 @@ - mips*-*) - os=-elf - ;; -+ or1k-*) -+ os=-elf -+ ;; - or32-*) - os=-coff - ;; -@@ -1617,9 +1590,6 @@ - sparc-* | *-sun) - os=-sunos4.1.1 - ;; -- pru-*) -- os=-elf -- ;; - *-be) - os=-beos - ;; -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/depcomp psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/depcomp ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/depcomp 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/depcomp 2020-07-16 10:48:35.445584426 +0200 -@@ -786,6 +786,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/install-sh psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/install-sh ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/install-sh 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/install-sh 2020-07-16 10:48:35.447363805 +0200 -@@ -41,15 +41,19 @@ - # This script is compatible with the BSD install script, but was written - # from scratch. - --tab=' ' - nl=' - ' --IFS=" $tab$nl" -+IFS=" "" $nl" - --# Set DOITPROG to "echo" to test this script. -+# set DOITPROG to echo to test this script - -+# Don't use :- since 4.3BSD and earlier shells don't like it. - doit=${DOITPROG-} --doit_exec=${doit:-exec} -+if test -z "$doit"; then -+ doit_exec=exec -+else -+ doit_exec=$doit -+fi - - # Put in absolute file names if you don't have them in your path; - # or use environment vars. -@@ -64,6 +68,17 @@ - rmprog=${RMPROG-rm} - stripprog=${STRIPPROG-strip} - -+posix_glob='?' 
-+initialize_posix_glob=' -+ test "$posix_glob" != "?" || { -+ if (set -f) 2>/dev/null; then -+ posix_glob= -+ else -+ posix_glob=: -+ fi -+ } -+' -+ - posix_mkdir= - - # Desired mode of installed file. -@@ -82,7 +97,7 @@ - dst_arg= - - copy_on_change=false --is_target_a_directory=possibly -+no_target_directory= - - usage="\ - Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE -@@ -122,57 +137,46 @@ - -d) dir_arg=true;; - - -g) chgrpcmd="$chgrpprog $2" -- shift;; -+ shift;; - - --help) echo "$usage"; exit $?;; - - -m) mode=$2 -- case $mode in -- *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*) -- echo "$0: invalid mode: $mode" >&2 -- exit 1;; -- esac -- shift;; -+ case $mode in -+ *' '* | *' '* | *' -+'* | *'*'* | *'?'* | *'['*) -+ echo "$0: invalid mode: $mode" >&2 -+ exit 1;; -+ esac -+ shift;; - - -o) chowncmd="$chownprog $2" -- shift;; -+ shift;; - - -s) stripcmd=$stripprog;; - -- -t) -- is_target_a_directory=always -- dst_arg=$2 -- # Protect names problematic for 'test' and other utilities. -- case $dst_arg in -- -* | [=\(\)!]) dst_arg=./$dst_arg;; -- esac -- shift;; -+ -t) dst_arg=$2 -+ # Protect names problematic for 'test' and other utilities. -+ case $dst_arg in -+ -* | [=\(\)!]) dst_arg=./$dst_arg;; -+ esac -+ shift;; - -- -T) is_target_a_directory=never;; -+ -T) no_target_directory=true;; - - --version) echo "$0 $scriptversion"; exit $?;; - -- --) shift -- break;; -+ --) shift -+ break;; - -- -*) echo "$0: invalid option: $1" >&2 -- exit 1;; -+ -*) echo "$0: invalid option: $1" >&2 -+ exit 1;; - - *) break;; - esac - shift - done - --# We allow the use of options -d and -T together, by making -d --# take the precedence; this is for compatibility with GNU install. -- --if test -n "$dir_arg"; then -- if test -n "$dst_arg"; then -- echo "$0: target directory not allowed when installing a directory." >&2 -- exit 1 -- fi --fi -- - if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then - # When -d is used, all remaining arguments are directories to create. - # When -t is used, the destination is already specified. -@@ -204,15 +208,6 @@ - fi - - if test -z "$dir_arg"; then -- if test $# -gt 1 || test "$is_target_a_directory" = always; then -- if test ! -d "$dst_arg"; then -- echo "$0: $dst_arg: Is not a directory." >&2 -- exit 1 -- fi -- fi --fi -- --if test -z "$dir_arg"; then - do_exit='(exit $ret); exit $ret' - trap "ret=129; $do_exit" 1 - trap "ret=130; $do_exit" 2 -@@ -228,16 +223,16 @@ - - *[0-7]) - if test -z "$stripcmd"; then -- u_plus_rw= -+ u_plus_rw= - else -- u_plus_rw='% 200' -+ u_plus_rw='% 200' - fi - cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; - *) - if test -z "$stripcmd"; then -- u_plus_rw= -+ u_plus_rw= - else -- u_plus_rw=,u+rw -+ u_plus_rw=,u+rw - fi - cp_umask=$mode$u_plus_rw;; - esac -@@ -387,51 +382,53 @@ - # directory the slow way, step by step, checking for races as we go. - - case $dstdir in -- /*) prefix='/';; -- [-=\(\)!]*) prefix='./';; -- *) prefix='';; -+ /*) prefix='/';; -+ [-=\(\)!]*) prefix='./';; -+ *) prefix='';; - esac - -+ eval "$initialize_posix_glob" -+ - oIFS=$IFS - IFS=/ -- set -f -+ $posix_glob set -f - set fnord $dstdir - shift -- set +f -+ $posix_glob set +f - IFS=$oIFS - - prefixes= - - for d - do -- test X"$d" = X && continue -+ test X"$d" = X && continue - -- prefix=$prefix$d -- if test -d "$prefix"; then -- prefixes= -- else -- if $posix_mkdir; then -- (umask=$mkdir_umask && -- $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break -- # Don't fail if two instances are running concurrently. 
-- test -d "$prefix" || exit 1 -- else -- case $prefix in -- *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; -- *) qprefix=$prefix;; -- esac -- prefixes="$prefixes '$qprefix'" -- fi -- fi -- prefix=$prefix/ -+ prefix=$prefix$d -+ if test -d "$prefix"; then -+ prefixes= -+ else -+ if $posix_mkdir; then -+ (umask=$mkdir_umask && -+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break -+ # Don't fail if two instances are running concurrently. -+ test -d "$prefix" || exit 1 -+ else -+ case $prefix in -+ *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; -+ *) qprefix=$prefix;; -+ esac -+ prefixes="$prefixes '$qprefix'" -+ fi -+ fi -+ prefix=$prefix/ - done - - if test -n "$prefixes"; then -- # Don't fail if two instances are running concurrently. -- (umask $mkdir_umask && -- eval "\$doit_exec \$mkdirprog $prefixes") || -- test -d "$dstdir" || exit 1 -- obsolete_mkdir_used=true -+ # Don't fail if two instances are running concurrently. -+ (umask $mkdir_umask && -+ eval "\$doit_exec \$mkdirprog $prefixes") || -+ test -d "$dstdir" || exit 1 -+ obsolete_mkdir_used=true - fi - fi - fi -@@ -466,12 +463,15 @@ - - # If -C, don't bother to copy if it wouldn't change the file. - if $copy_on_change && -- old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && -- new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && -- set -f && -+ old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && -+ new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && -+ -+ eval "$initialize_posix_glob" && -+ $posix_glob set -f && - set X $old && old=:$2:$4:$5:$6 && - set X $new && new=:$2:$4:$5:$6 && -- set +f && -+ $posix_glob set +f && -+ - test "$old" = "$new" && - $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 - then -@@ -484,24 +484,24 @@ - # to itself, or perhaps because mv is so ancient that it does not - # support -f. - { -- # Now remove or move aside any old file at destination location. -- # We try this two ways since rm can't unlink itself on some -- # systems and the destination file might be busy for other -- # reasons. In this case, the final cleanup might fail but the new -- # file should still install successfully. -- { -- test ! -f "$dst" || -- $doit $rmcmd -f "$dst" 2>/dev/null || -- { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && -- { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } -- } || -- { echo "$0: cannot unlink or rename $dst" >&2 -- (exit 1); exit 1 -- } -- } && -+ # Now remove or move aside any old file at destination location. -+ # We try this two ways since rm can't unlink itself on some -+ # systems and the destination file might be busy for other -+ # reasons. In this case, the final cleanup might fail but the new -+ # file should still install successfully. -+ { -+ test ! -f "$dst" || -+ $doit $rmcmd -f "$dst" 2>/dev/null || -+ { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && -+ { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } -+ } || -+ { echo "$0: cannot unlink or rename $dst" >&2 -+ (exit 1); exit 1 -+ } -+ } && - -- # Now rename the file to the real destination. -- $doit $mvcmd "$dsttmp" "$dst" -+ # Now rename the file to the real destination. 
-+ $doit $mvcmd "$dsttmp" "$dst" - } - fi || exit 1 - -@@ -513,6 +513,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/libtool.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/libtool.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/libtool.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/libtool.m4 2020-07-16 10:48:35.455126918 +0200 -@@ -1,6 +1,8 @@ - # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- - # --# Copyright (C) 1996-2001, 2003-2015 Free Software Foundation, Inc. -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -+# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -+# Foundation, Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is free software; the Free Software Foundation gives -@@ -8,30 +10,36 @@ - # modifications, as long as this notice is preserved. - - m4_define([_LT_COPYING], [dnl --# Copyright (C) 2014 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# GNU Libtool is free software; you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation; either version 2 of of the License, or --# (at your option) any later version. -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -+# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -+# Foundation, Inc. -+# Written by Gordon Matzigkeit, 1996 -+# -+# This file is part of GNU Libtool. -+# -+# GNU Libtool is free software; you can redistribute it and/or -+# modify it under the terms of the GNU General Public License as -+# published by the Free Software Foundation; either version 2 of -+# the License, or (at your option) any later version. - # --# As a special exception to the GNU General Public License, if you --# distribute this file as part of a program or library that is built --# using GNU Libtool, you may include this file under the same --# distribution terms that you use for the rest of that program. -+# As a special exception to the GNU General Public License, -+# if you distribute this file as part of a program or library that -+# is built using GNU Libtool, you may include this file under the -+# same distribution terms that you use for the rest of that program. - # --# GNU Libtool is distributed in the hope that it will be useful, but --# WITHOUT ANY WARRANTY; without even the implied warranty of -+# GNU Libtool is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of - # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - # GNU General Public License for more details. - # - # You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -+# along with GNU Libtool; see the file COPYING. If not, a copy -+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or -+# obtained by writing to the Free Software Foundation, Inc., -+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
- ]) - --# serial 58 LT_INIT -+# serial 57 LT_INIT - - - # LT_PREREQ(VERSION) -@@ -59,7 +67,7 @@ - # LT_INIT([OPTIONS]) - # ------------------ - AC_DEFUN([LT_INIT], --[AC_PREREQ([2.62])dnl We use AC_PATH_PROGS_FEATURE_CHECK -+[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT - AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl - AC_BEFORE([$0], [LT_LANG])dnl - AC_BEFORE([$0], [LT_OUTPUT])dnl -@@ -83,7 +91,7 @@ - _LT_SET_OPTIONS([$0], [$1]) - - # This can be used to rebuild libtool when needed --LIBTOOL_DEPS=$ltmain -+LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. - LIBTOOL='$(SHELL) $(top_builddir)/libtool' -@@ -103,43 +111,26 @@ - dnl AC_DEFUN([AM_PROG_LIBTOOL], []) - - --# _LT_PREPARE_CC_BASENAME --# ----------------------- --m4_defun([_LT_PREPARE_CC_BASENAME], [ --# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. --func_cc_basename () --{ -- for cc_temp in @S|@*""; do -- case $cc_temp in -- compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; -- distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; -- \-*) ;; -- *) break;; -- esac -- done -- func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` --} --])# _LT_PREPARE_CC_BASENAME -- -- - # _LT_CC_BASENAME(CC) - # ------------------- --# It would be clearer to call AC_REQUIREs from _LT_PREPARE_CC_BASENAME, --# but that macro is also expanded into generated libtool script, which --# arranges for $SED and $ECHO to be set by different means. -+# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. - m4_defun([_LT_CC_BASENAME], --[m4_require([_LT_PREPARE_CC_BASENAME])dnl --AC_REQUIRE([_LT_DECL_SED])dnl --AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl --func_cc_basename $1 --cc_basename=$func_cc_basename_result -+[for cc_temp in $1""; do -+ case $cc_temp in -+ compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; -+ distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; -+ \-*) ;; -+ *) break;; -+ esac -+done -+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - ]) - - - # _LT_FILEUTILS_DEFAULTS - # ---------------------- - # It is okay to use these file commands and assume they have been set --# sensibly after 'm4_require([_LT_FILEUTILS_DEFAULTS])'. -+# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'. - m4_defun([_LT_FILEUTILS_DEFAULTS], - [: ${CP="cp -f"} - : ${MV="mv -f"} -@@ -186,16 +177,15 @@ - m4_require([_LT_CMD_OLD_ARCHIVE])dnl - m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl - m4_require([_LT_WITH_SYSROOT])dnl --m4_require([_LT_CMD_TRUNCATE])dnl - - _LT_CONFIG_LIBTOOL_INIT([ --# See if we are running on zsh, and set the options that allow our -+# See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes INIT. --if test -n "\${ZSH_VERSION+set}"; then -+if test -n "\${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - ]) --if test -n "${ZSH_VERSION+set}"; then -+if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -@@ -208,7 +198,7 @@ - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. 
-- if test set != "${COLLECT_NAMES+set}"; then -+ if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -219,14 +209,14 @@ - ofile=libtool - can_build_shared=yes - --# All known linkers require a '.a' archive for static linking (except MSVC, -+# All known linkers require a `.a' archive for static linking (except MSVC, - # which needs '.lib'). - libext=a - --with_gnu_ld=$lt_cv_prog_gnu_ld -+with_gnu_ld="$lt_cv_prog_gnu_ld" - --old_CC=$CC --old_CFLAGS=$CFLAGS -+old_CC="$CC" -+old_CFLAGS="$CFLAGS" - - # Set sane defaults for various variables - test -z "$CC" && CC=cc -@@ -279,14 +269,14 @@ - - # _LT_PROG_LTMAIN - # --------------- --# Note that this code is called both from 'configure', and 'config.status' -+# Note that this code is called both from `configure', and `config.status' - # now that we use AC_CONFIG_COMMANDS to generate libtool. Notably, --# 'config.status' has no value for ac_aux_dir unless we are using Automake, -+# `config.status' has no value for ac_aux_dir unless we are using Automake, - # so we pass a copy along to make sure it has a sensible value anyway. - m4_defun([_LT_PROG_LTMAIN], - [m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl - _LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir']) --ltmain=$ac_aux_dir/ltmain.sh -+ltmain="$ac_aux_dir/ltmain.sh" - ])# _LT_PROG_LTMAIN - - -@@ -296,7 +286,7 @@ - - # So that we can recreate a full libtool script including additional - # tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS --# in macros and then make a single call at the end using the 'libtool' -+# in macros and then make a single call at the end using the `libtool' - # label. - - -@@ -431,8 +421,8 @@ - - # _LT_CONFIG_STATUS_DECLARE([VARNAME]) - # ------------------------------------ --# Quote a variable value, and forward it to 'config.status' so that its --# declaration there will have the same value as in 'configure'. VARNAME -+# Quote a variable value, and forward it to `config.status' so that its -+# declaration there will have the same value as in `configure'. VARNAME - # must have a single quote delimited value for this to work. - m4_define([_LT_CONFIG_STATUS_DECLARE], - [$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`']) -@@ -456,7 +446,7 @@ - # Output comment and list of tags supported by the script - m4_defun([_LT_LIBTOOL_TAGS], - [_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl --available_tags='_LT_TAGS'dnl -+available_tags="_LT_TAGS"dnl - ]) - - -@@ -484,7 +474,7 @@ - # _LT_LIBTOOL_CONFIG_VARS - # ----------------------- - # Produce commented declarations of non-tagged libtool config variables --# suitable for insertion in the LIBTOOL CONFIG section of the 'libtool' -+# suitable for insertion in the LIBTOOL CONFIG section of the `libtool' - # script. Tagged libtool config variables (even for the LIBTOOL CONFIG - # section) are produced by _LT_LIBTOOL_TAG_VARS. - m4_defun([_LT_LIBTOOL_CONFIG_VARS], -@@ -510,8 +500,8 @@ - # Send accumulated output to $CONFIG_STATUS. Thanks to the lists of - # variables for single and double quote escaping we saved from calls - # to _LT_DECL, we can put quote escaped variables declarations --# into 'config.status', and then the shell code to quote escape them in --# for loops in 'config.status'. Finally, any additional code accumulated -+# into `config.status', and then the shell code to quote escape them in -+# for loops in `config.status'. 
Finally, any additional code accumulated - # from calls to _LT_CONFIG_LIBTOOL_INIT is expanded. - m4_defun([_LT_CONFIG_COMMANDS], - [AC_PROVIDE_IFELSE([LT_OUTPUT], -@@ -557,7 +547,7 @@ - ]], lt_decl_quote_varnames); do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[[\\\\\\\`\\"\\\$]]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -570,7 +560,7 @@ - ]], lt_decl_dquote_varnames); do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[[\\\\\\\`\\"\\\$]]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -586,7 +576,7 @@ - # Generate a child script FILE with all initialization necessary to - # reuse the environment learned by the parent script, and make the - # file executable. If COMMENT is supplied, it is inserted after the --# '#!' sequence but before initialization text begins. After this -+# `#!' sequence but before initialization text begins. After this - # macro, additional text can be appended to FILE to form the body of - # the child script. The macro ends with non-zero status if the - # file could not be fully written (such as if the disk is full). -@@ -608,7 +598,7 @@ - _AS_PREPARE - exec AS_MESSAGE_FD>&1 - _ASEOF --test 0 = "$lt_write_fail" && chmod +x $1[]dnl -+test $lt_write_fail = 0 && chmod +x $1[]dnl - m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT - - # LT_OUTPUT -@@ -631,7 +621,7 @@ - } >&AS_MESSAGE_LOG_FD - - lt_cl_help="\ --'$as_me' creates a local libtool stub from the current configuration, -+\`$as_me' creates a local libtool stub from the current configuration, - for use in further configure time tests before the real libtool is - generated. - -@@ -653,7 +643,7 @@ - This config.lt script is free software; the Free Software Foundation - gives unlimited permision to copy, distribute and modify it." - --while test 0 != $[#] -+while test $[#] != 0 - do - case $[1] in - --version | --v* | -V ) -@@ -666,10 +656,10 @@ - lt_cl_silent=: ;; - - -*) AC_MSG_ERROR([unrecognized option: $[1] --Try '$[0] --help' for more information.]) ;; -+Try \`$[0] --help' for more information.]) ;; - - *) AC_MSG_ERROR([unrecognized argument: $[1] --Try '$[0] --help' for more information.]) ;; -+Try \`$[0] --help' for more information.]) ;; - esac - shift - done -@@ -695,7 +685,7 @@ - # open by configure. Here we exec the FD to /dev/null, effectively closing - # config.log, so it can be properly (re)opened and appended to by config.lt. 
- lt_cl_success=: --test yes = "$silent" && -+test "$silent" = yes && - lt_config_lt_args="$lt_config_lt_args --quiet" - exec AS_MESSAGE_LOG_FD>/dev/null - $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false -@@ -715,30 +705,27 @@ - _LT_CONFIG_SAVE_COMMANDS([ - m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl - m4_if(_LT_TAG, [C], [ -- # See if we are running on zsh, and set the options that allow our -+ # See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes. -- if test -n "${ZSH_VERSION+set}"; then -+ if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -- cfgfile=${ofile}T -+ cfgfile="${ofile}T" - trap "$RM \"$cfgfile\"; exit 1" 1 2 15 - $RM "$cfgfile" - - cat <<_LT_EOF >> "$cfgfile" - #! $SHELL --# Generated automatically by $as_me ($PACKAGE) $VERSION --# NOTE: Changes made to this file will be lost: look at ltmain.sh. -- --# Provide generalized library-building support services. --# Written by Gordon Matzigkeit, 1996 - -+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. -+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION -+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: -+# NOTE: Changes made to this file will be lost: look at ltmain.sh. -+# - _LT_COPYING - _LT_LIBTOOL_TAGS - --# Configured defaults for sys_lib_dlsearch_path munging. --: \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} -- - # ### BEGIN LIBTOOL CONFIG - _LT_LIBTOOL_CONFIG_VARS - _LT_LIBTOOL_TAG_VARS -@@ -746,24 +733,13 @@ - - _LT_EOF - -- cat <<'_LT_EOF' >> "$cfgfile" -- --# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE -- --_LT_PREPARE_MUNGE_PATH_LIST --_LT_PREPARE_CC_BASENAME -- --# ### END FUNCTIONS SHARED WITH CONFIGURE -- --_LT_EOF -- - case $host_os in - aix3*) - cat <<\_LT_EOF >> "$cfgfile" - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. --if test set != "${COLLECT_NAMES+set}"; then -+if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -780,6 +756,8 @@ - sed '$q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - -+ _LT_PROG_REPLACE_SHELLFNS -+ - mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" -@@ -797,6 +775,7 @@ - [m4_if([$1], [], [ - PACKAGE='$PACKAGE' - VERSION='$VERSION' -+ TIMESTAMP='$TIMESTAMP' - RM='$RM' - ofile='$ofile'], []) - ])dnl /_LT_CONFIG_SAVE_COMMANDS -@@ -995,7 +974,7 @@ - - AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], - [lt_cv_apple_cc_single_mod=no -- if test -z "$LT_MULTI_MODULE"; then -+ if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the -@@ -1013,7 +992,7 @@ - cat conftest.err >&AS_MESSAGE_LOG_FD - # Otherwise, if the output was created with a 0 exit code from - # the compiler, it worked. 
-- elif test -f libconftest.dylib && test 0 = "$_lt_result"; then -+ elif test -f libconftest.dylib && test $_lt_result -eq 0; then - lt_cv_apple_cc_single_mod=yes - else - cat conftest.err >&AS_MESSAGE_LOG_FD -@@ -1031,7 +1010,7 @@ - AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], - [lt_cv_ld_exported_symbols_list=yes], - [lt_cv_ld_exported_symbols_list=no]) -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - ]) - - AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load], -@@ -1053,7 +1032,7 @@ - _lt_result=$? - if test -s conftest.err && $GREP force_load conftest.err; then - cat conftest.err >&AS_MESSAGE_LOG_FD -- elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then -+ elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then - lt_cv_ld_force_load=yes - else - cat conftest.err >&AS_MESSAGE_LOG_FD -@@ -1063,32 +1042,32 @@ - ]) - case $host_os in - rhapsody* | darwin1.[[012]]) -- _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) # darwin 5.x on - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? - case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -- 10.[[012]][[,.]]*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; -+ 10.[[012]]*) -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac -- if test yes = "$lt_cv_apple_cc_single_mod"; then -+ if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi -- if test yes = "$lt_cv_ld_exported_symbols_list"; then -- _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' -+ if test "$lt_cv_ld_exported_symbols_list" = "yes"; then -+ _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else -- _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' -+ _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' - fi -- if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then -+ if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then - _lt_dsymutil='~$DSYMUTIL $lib || :' - else - _lt_dsymutil= -@@ -1108,29 +1087,29 @@ - _LT_TAGVAR(hardcode_direct, $1)=no - _LT_TAGVAR(hardcode_automatic, $1)=yes - _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported -- if test yes = "$lt_cv_ld_force_load"; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' -+ if test "$lt_cv_ld_force_load" = "yes"; then -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all 
\"$new_convenience\"`' - m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes], - [FC], [_LT_TAGVAR(compiler_needs_object, $1)=yes]) - else - _LT_TAGVAR(whole_archive_flag_spec, $1)='' - fi - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(allow_undefined_flag, $1)=$_lt_dar_allow_undefined -+ _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined" - case $cc_basename in -- ifort*|nagfor*) _lt_dar_can_shared=yes ;; -+ ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac -- if test yes = "$_lt_dar_can_shared"; then -+ if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all -- _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" -- _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" -- _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" -- _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" -+ _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" -+ _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" -+ _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - m4_if([$1], [CXX], --[ if test yes != "$lt_cv_apple_cc_single_mod"; then -- _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil" -- _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil" -+[ if test "$lt_cv_apple_cc_single_mod" != "yes"; then -+ _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC 
-dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" - fi - ],[]) - else -@@ -1150,7 +1129,7 @@ - # Allow to override them for all tags through lt_cv_aix_libpath. - m4_defun([_LT_SYS_MODULE_PATH_AIX], - [m4_require([_LT_DECL_SED])dnl --if test set = "${lt_cv_aix_libpath+set}"; then -+if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], -@@ -1168,7 +1147,7 @@ - _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi],[]) - if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then -- _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=/usr/lib:/lib -+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib" - fi - ]) - aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) -@@ -1188,8 +1167,8 @@ - # ----------------------- - # Find how we can fake an echo command that does not interpret backslash. - # In particular, with Autoconf 2.60 or later we add some code to the start --# of the generated configure script that will find a shell with a builtin --# printf (that we can use as an echo command). -+# of the generated configure script which will find a shell with a builtin -+# printf (which we can use as an echo command). - m4_defun([_LT_PROG_ECHO_BACKSLASH], - [ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' - ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO -@@ -1217,10 +1196,10 @@ - # Invoke $ECHO with all args, space-separated. - func_echo_all () - { -- $ECHO "$*" -+ $ECHO "$*" - } - --case $ECHO in -+case "$ECHO" in - printf*) AC_MSG_RESULT([printf]) ;; - print*) AC_MSG_RESULT([print -r]) ;; - *) AC_MSG_RESULT([cat]) ;; -@@ -1246,17 +1225,16 @@ - AC_DEFUN([_LT_WITH_SYSROOT], - [AC_MSG_CHECKING([for sysroot]) - AC_ARG_WITH([sysroot], --[AS_HELP_STRING([--with-sysroot@<:@=DIR@:>@], -- [Search for dependent libraries within DIR (or the compiler's sysroot -- if not specified).])], -+[ --with-sysroot[=DIR] Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified).], - [], [with_sysroot=no]) - - dnl lt_sysroot will always be passed unquoted. We quote it here - dnl in case the user passed a directory name. - lt_sysroot= --case $with_sysroot in #( -+case ${with_sysroot} in #( - yes) -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_sysroot=`$CC --print-sysroot 2>/dev/null` - fi - ;; #( -@@ -1266,14 +1244,14 @@ - no|'') - ;; #( - *) -- AC_MSG_RESULT([$with_sysroot]) -+ AC_MSG_RESULT([${with_sysroot}]) - AC_MSG_ERROR([The sysroot must be an absolute path.]) - ;; - esac - - AC_MSG_RESULT([${lt_sysroot:-no}]) - _LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl --[dependent libraries, and where our libraries should be installed.])]) -+[dependent libraries, and in which our libraries should be installed.])]) - - # _LT_ENABLE_LOCK - # --------------- -@@ -1281,33 +1259,31 @@ - [AC_ARG_ENABLE([libtool-lock], - [AS_HELP_STRING([--disable-libtool-lock], - [avoid locking (might break parallel builds)])]) --test no = "$enable_libtool_lock" || enable_libtool_lock=yes -+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - - # Some flags need to be propagated to the compiler or linker for good - # libtool support. - case $host in - ia64-*-hpux*) -- # Find out what ABI is being produced by ac_compile, and set mode -- # options accordingly. 
-+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.$ac_objext` in - *ELF-32*) -- HPUX_IA64_MODE=32 -+ HPUX_IA64_MODE="32" - ;; - *ELF-64*) -- HPUX_IA64_MODE=64 -+ HPUX_IA64_MODE="64" - ;; - esac - fi - rm -rf conftest* - ;; - *-*-irix6*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -+ # Find out which ABI we are using. - echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -melf32bsmip" -@@ -1336,46 +1312,9 @@ - rm -rf conftest* - ;; - --mips64*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -- echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext -- if AC_TRY_EVAL(ac_compile); then -- emul=elf -- case `/usr/bin/file conftest.$ac_objext` in -- *32-bit*) -- emul="${emul}32" -- ;; -- *64-bit*) -- emul="${emul}64" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *MSB*) -- emul="${emul}btsmip" -- ;; -- *LSB*) -- emul="${emul}ltsmip" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *N32*) -- emul="${emul}n32" -- ;; -- esac -- LD="${LD-ld} -m $emul" -- fi -- rm -rf conftest* -- ;; -- --x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ -+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ - s390*-*linux*|s390*-*tpf*|sparc*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. Note that the listed cases only cover the -- # situations where additional linker options are needed (such as when -- # doing 32-bit compilation for a host where ld defaults to 64-bit, or -- # vice versa); the common cases where no linker options are needed do -- # not appear in the list. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.o` in -@@ -1385,19 +1324,9 @@ - LD="${LD-ld} -m elf_i386_fbsd" - ;; - x86_64-*linux*) -- case `/usr/bin/file conftest.o` in -- *x86-64*) -- LD="${LD-ld} -m elf32_x86_64" -- ;; -- *) -- LD="${LD-ld} -m elf_i386" -- ;; -- esac -- ;; -- powerpc64le-*linux*) -- LD="${LD-ld} -m elf32lppclinux" -+ LD="${LD-ld} -m elf_i386" - ;; -- powerpc64-*linux*) -+ ppc64-*linux*|powerpc64-*linux*) - LD="${LD-ld} -m elf32ppclinux" - ;; - s390x-*linux*) -@@ -1416,10 +1345,7 @@ - x86_64-*linux*) - LD="${LD-ld} -m elf_x86_64" - ;; -- powerpcle-*linux*) -- LD="${LD-ld} -m elf64lppc" -- ;; -- powerpc-*linux*) -+ ppc*-*linux*|powerpc*-*linux*) - LD="${LD-ld} -m elf64ppc" - ;; - s390*-*linux*|s390*-*tpf*) -@@ -1437,20 +1363,19 @@ - - *-*-sco3.2v5*) - # On SCO OpenServer 5, we need -belf to get full-featured binaries. -- SAVE_CFLAGS=$CFLAGS -+ SAVE_CFLAGS="$CFLAGS" - CFLAGS="$CFLAGS -belf" - AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, - [AC_LANG_PUSH(C) - AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) - AC_LANG_POP]) -- if test yes != "$lt_cv_cc_needs_belf"; then -+ if test x"$lt_cv_cc_needs_belf" != x"yes"; then - # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf -- CFLAGS=$SAVE_CFLAGS -+ CFLAGS="$SAVE_CFLAGS" - fi - ;; - *-*solaris*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. 
-+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.o` in -@@ -1458,7 +1383,7 @@ - case $lt_cv_prog_gnu_ld in - yes*) - case $host in -- i?86-*-solaris*|x86_64-*-solaris*) -+ i?86-*-solaris*) - LD="${LD-ld} -m elf_x86_64" - ;; - sparc*-*-solaris*) -@@ -1467,7 +1392,7 @@ - esac - # GNU ld 2.21 introduced _sol2 emulations. Use them if available. - if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then -- LD=${LD-ld}_sol2 -+ LD="${LD-ld}_sol2" - fi - ;; - *) -@@ -1483,7 +1408,7 @@ - ;; - esac - --need_locks=$enable_libtool_lock -+need_locks="$enable_libtool_lock" - ])# _LT_ENABLE_LOCK - - -@@ -1502,11 +1427,11 @@ - [echo conftest.$ac_objext > conftest.lst - lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' - AC_TRY_EVAL([lt_ar_try]) -- if test 0 -eq "$ac_status"; then -+ if test "$ac_status" -eq 0; then - # Ensure the archiver fails upon bogus file names. - rm -f conftest.$ac_objext libconftest.a - AC_TRY_EVAL([lt_ar_try]) -- if test 0 -ne "$ac_status"; then -+ if test "$ac_status" -ne 0; then - lt_cv_ar_at_file=@ - fi - fi -@@ -1514,7 +1439,7 @@ - ]) - ]) - --if test no = "$lt_cv_ar_at_file"; then -+if test "x$lt_cv_ar_at_file" = xno; then - archiver_list_spec= - else - archiver_list_spec=$lt_cv_ar_at_file -@@ -1545,7 +1470,7 @@ - - if test -n "$RANLIB"; then - case $host_os in -- bitrig* | openbsd*) -+ openbsd*) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" - ;; - *) -@@ -1581,7 +1506,7 @@ - [$2=no - m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="$3" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="$3" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins -@@ -1608,7 +1533,7 @@ - $RM conftest* - ]) - --if test yes = "[$]$2"; then -+if test x"[$]$2" = xyes; then - m4_if([$5], , :, [$5]) - else - m4_if([$6], , :, [$6]) -@@ -1630,7 +1555,7 @@ - m4_require([_LT_DECL_SED])dnl - AC_CACHE_CHECK([$1], [$2], - [$2=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $3" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -1649,10 +1574,10 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - ]) - --if test yes = "[$]$2"; then -+if test x"[$]$2" = xyes; then - m4_if([$4], , :, [$4]) - else - m4_if([$5], , :, [$5]) -@@ -1673,7 +1598,7 @@ - AC_MSG_CHECKING([the maximum length of command line arguments]) - AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl - i=0 -- teststring=ABCD -+ teststring="ABCD" - - case $build_os in - msdosdjgpp*) -@@ -1713,7 +1638,7 @@ - lt_cv_sys_max_cmd_len=8192; - ;; - -- bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) -+ netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) - # This has been around since 386BSD, at least. Likely further. 
- if test -x /sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` -@@ -1763,23 +1688,22 @@ - ;; - *) - lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` -- if test -n "$lt_cv_sys_max_cmd_len" && \ -- test undefined != "$lt_cv_sys_max_cmd_len"; then -+ if test -n "$lt_cv_sys_max_cmd_len"; then - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - else - # Make teststring a little bigger before we do anything with it. - # a 1K string should be a reasonable start. -- for i in 1 2 3 4 5 6 7 8; do -+ for i in 1 2 3 4 5 6 7 8 ; do - teststring=$teststring$teststring - done - SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} - # If test is not a shell built-in, we'll probably end up computing a - # maximum length that is only half of the actual maximum length, but - # we can't tell. -- while { test X`env echo "$teststring$teststring" 2>/dev/null` \ -+ while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ - = "X$teststring$teststring"; } >/dev/null 2>&1 && -- test 17 != "$i" # 1/2 MB should be enough -+ test $i != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - teststring=$teststring$teststring -@@ -1795,7 +1719,7 @@ - ;; - esac - ]) --if test -n "$lt_cv_sys_max_cmd_len"; then -+if test -n $lt_cv_sys_max_cmd_len ; then - AC_MSG_RESULT($lt_cv_sys_max_cmd_len) - else - AC_MSG_RESULT(none) -@@ -1823,7 +1747,7 @@ - # ---------------------------------------------------------------- - m4_defun([_LT_TRY_DLOPEN_SELF], - [m4_require([_LT_HEADER_DLFCN])dnl --if test yes = "$cross_compiling"; then : -+if test "$cross_compiling" = yes; then : - [$4] - else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 -@@ -1870,9 +1794,9 @@ - # endif - #endif - --/* When -fvisibility=hidden is used, assume the code has been annotated -+/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ --#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) - int fnord () __attribute__((visibility("default"))); - #endif - -@@ -1898,7 +1822,7 @@ - return status; - }] - _LT_EOF -- if AC_TRY_EVAL(ac_link) && test -s "conftest$ac_exeext" 2>/dev/null; then -+ if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null - lt_status=$? 
- case x$lt_status in -@@ -1919,7 +1843,7 @@ - # ------------------ - AC_DEFUN([LT_SYS_DLOPEN_SELF], - [m4_require([_LT_HEADER_DLFCN])dnl --if test yes != "$enable_dlopen"; then -+if test "x$enable_dlopen" != xyes; then - enable_dlopen=unknown - enable_dlopen_self=unknown - enable_dlopen_self_static=unknown -@@ -1929,52 +1853,44 @@ - - case $host_os in - beos*) -- lt_cv_dlopen=load_add_on -+ lt_cv_dlopen="load_add_on" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ;; - - mingw* | pw32* | cegcc*) -- lt_cv_dlopen=LoadLibrary -+ lt_cv_dlopen="LoadLibrary" - lt_cv_dlopen_libs= - ;; - - cygwin*) -- lt_cv_dlopen=dlopen -+ lt_cv_dlopen="dlopen" - lt_cv_dlopen_libs= - ;; - - darwin*) -- # if libdl is installed we need to link against it -+ # if libdl is installed we need to link against it - AC_CHECK_LIB([dl], [dlopen], -- [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],[ -- lt_cv_dlopen=dyld -+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[ -+ lt_cv_dlopen="dyld" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ]) - ;; - -- tpf*) -- # Don't try to run any link tests for TPF. We know it's impossible -- # because TPF is a cross-compiler, and we know how we open DSOs. -- lt_cv_dlopen=dlopen -- lt_cv_dlopen_libs= -- lt_cv_dlopen_self=no -- ;; -- - *) - AC_CHECK_FUNC([shl_load], -- [lt_cv_dlopen=shl_load], -+ [lt_cv_dlopen="shl_load"], - [AC_CHECK_LIB([dld], [shl_load], -- [lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld], -+ [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"], - [AC_CHECK_FUNC([dlopen], -- [lt_cv_dlopen=dlopen], -+ [lt_cv_dlopen="dlopen"], - [AC_CHECK_LIB([dl], [dlopen], -- [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl], -+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"], - [AC_CHECK_LIB([svld], [dlopen], -- [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld], -+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"], - [AC_CHECK_LIB([dld], [dld_link], -- [lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld]) -+ [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"]) - ]) - ]) - ]) -@@ -1983,21 +1899,21 @@ - ;; - esac - -- if test no = "$lt_cv_dlopen"; then -- enable_dlopen=no -- else -+ if test "x$lt_cv_dlopen" != xno; then - enable_dlopen=yes -+ else -+ enable_dlopen=no - fi - - case $lt_cv_dlopen in - dlopen) -- save_CPPFLAGS=$CPPFLAGS -- test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" -+ save_CPPFLAGS="$CPPFLAGS" -+ test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" - -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" - -- save_LIBS=$LIBS -+ save_LIBS="$LIBS" - LIBS="$lt_cv_dlopen_libs $LIBS" - - AC_CACHE_CHECK([whether a program can dlopen itself], -@@ -2007,7 +1923,7 @@ - lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) - ]) - -- if test yes = "$lt_cv_dlopen_self"; then -+ if test "x$lt_cv_dlopen_self" = xyes; then - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - AC_CACHE_CHECK([whether a statically linked program can dlopen itself], - lt_cv_dlopen_self_static, [dnl -@@ -2017,9 +1933,9 @@ - ]) - fi - -- CPPFLAGS=$save_CPPFLAGS -- LDFLAGS=$save_LDFLAGS -- LIBS=$save_LIBS -+ CPPFLAGS="$save_CPPFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+ LIBS="$save_LIBS" - ;; - esac - -@@ -2111,8 +2027,8 @@ - m4_require([_LT_FILEUTILS_DEFAULTS])dnl - _LT_COMPILER_C_O([$1]) - --hard_links=nottested --if test no = "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" && test no != "$need_locks"; then -+hard_links="nottested" -+if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = 
no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - AC_MSG_CHECKING([if we can lock with hard links]) - hard_links=yes -@@ -2122,8 +2038,8 @@ - ln conftest.a conftest.b 2>&5 || hard_links=no - ln conftest.a conftest.b 2>/dev/null && hard_links=no - AC_MSG_RESULT([$hard_links]) -- if test no = "$hard_links"; then -- AC_MSG_WARN(['$CC' does not support '-c -o', so 'make -j' may be unsafe]) -+ if test "$hard_links" = no; then -+ AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe]) - need_locks=warn - fi - else -@@ -2150,8 +2066,8 @@ - _LT_DECL([], [objdir], [0], - [The name of the directory that contains temporary libtool files])dnl - m4_pattern_allow([LT_OBJDIR])dnl --AC_DEFINE_UNQUOTED([LT_OBJDIR], "$lt_cv_objdir/", -- [Define to the sub-directory where libtool stores uninstalled libraries.]) -+AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/", -+ [Define to the sub-directory in which libtool stores uninstalled libraries.]) - ])# _LT_CHECK_OBJDIR - - -@@ -2163,15 +2079,15 @@ - _LT_TAGVAR(hardcode_action, $1)= - if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" || - test -n "$_LT_TAGVAR(runpath_var, $1)" || -- test yes = "$_LT_TAGVAR(hardcode_automatic, $1)"; then -+ test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then - - # We can hardcode non-existent directories. -- if test no != "$_LT_TAGVAR(hardcode_direct, $1)" && -+ if test "$_LT_TAGVAR(hardcode_direct, $1)" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one -- ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" && -- test no != "$_LT_TAGVAR(hardcode_minus_L, $1)"; then -+ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no && -+ test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then - # Linking always hardcodes the temporary library directory. - _LT_TAGVAR(hardcode_action, $1)=relink - else -@@ -2185,12 +2101,12 @@ - fi - AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)]) - --if test relink = "$_LT_TAGVAR(hardcode_action, $1)" || -- test yes = "$_LT_TAGVAR(inherit_rpath, $1)"; then -+if test "$_LT_TAGVAR(hardcode_action, $1)" = relink || -+ test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then - # Fast installation is not supported - enable_fast_install=no --elif test yes = "$shlibpath_overrides_runpath" || -- test no = "$enable_shared"; then -+elif test "$shlibpath_overrides_runpath" = yes || -+ test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless - fi -@@ -2214,7 +2130,7 @@ - # FIXME - insert some real tests, host_os isn't really good enough - case $host_os in - darwin*) -- if test -n "$STRIP"; then -+ if test -n "$STRIP" ; then - striplib="$STRIP -x" - old_striplib="$STRIP -S" - AC_MSG_RESULT([yes]) -@@ -2232,47 +2148,6 @@ - ])# _LT_CMD_STRIPLIB - - --# _LT_PREPARE_MUNGE_PATH_LIST --# --------------------------- --# Make sure func_munge_path_list() is defined correctly. 
--m4_defun([_LT_PREPARE_MUNGE_PATH_LIST], --[[# func_munge_path_list VARIABLE PATH --# ----------------------------------- --# VARIABLE is name of variable containing _space_ separated list of --# directories to be munged by the contents of PATH, which is string --# having a format: --# "DIR[:DIR]:" --# string "DIR[ DIR]" will be prepended to VARIABLE --# ":DIR[:DIR]" --# string "DIR[ DIR]" will be appended to VARIABLE --# "DIRP[:DIRP]::[DIRA:]DIRA" --# string "DIRP[ DIRP]" will be prepended to VARIABLE and string --# "DIRA[ DIRA]" will be appended to VARIABLE --# "DIR[:DIR]" --# VARIABLE will be replaced by "DIR[ DIR]" --func_munge_path_list () --{ -- case x@S|@2 in -- x) -- ;; -- *:) -- eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'` \@S|@@S|@1\" -- ;; -- x:*) -- eval @S|@1=\"\@S|@@S|@1 `$ECHO @S|@2 | $SED 's/:/ /g'`\" -- ;; -- *::*) -- eval @S|@1=\"\@S|@@S|@1\ `$ECHO @S|@2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" -- eval @S|@1=\"`$ECHO @S|@2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \@S|@@S|@1\" -- ;; -- *) -- eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'`\" -- ;; -- esac --} --]])# _LT_PREPARE_PATH_LIST -- -- - # _LT_SYS_DYNAMIC_LINKER([TAG]) - # ----------------------------- - # PORTME Fill in your ld.so characteristics -@@ -2283,18 +2158,17 @@ - m4_require([_LT_DECL_OBJDUMP])dnl - m4_require([_LT_DECL_SED])dnl - m4_require([_LT_CHECK_SHELL_FEATURES])dnl --m4_require([_LT_PREPARE_MUNGE_PATH_LIST])dnl - AC_MSG_CHECKING([dynamic linker characteristics]) - m4_if([$1], - [], [ --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $host_os in -- darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; -- *) lt_awk_arg='/^libraries:/' ;; -+ darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; -+ *) lt_awk_arg="/^libraries:/" ;; - esac - case $host_os in -- mingw* | cegcc*) lt_sed_strip_eq='s|=\([[A-Za-z]]:\)|\1|g' ;; -- *) lt_sed_strip_eq='s|=/|/|g' ;; -+ mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;; -+ *) lt_sed_strip_eq="s,=/,/,g" ;; - esac - lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` - case $lt_search_path_spec in -@@ -2310,35 +2184,28 @@ - ;; - esac - # Ok, now we have the path, separated by spaces, we can step through it -- # and add multilib dir if necessary... -+ # and add multilib dir if necessary. - lt_tmp_lt_search_path_spec= -- lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` -- # ...but if some path component already ends with the multilib dir we assume -- # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). 
-- case "$lt_multi_os_dir; $lt_search_path_spec " in -- "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) -- lt_multi_os_dir= -- ;; -- esac -+ lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` - for lt_sys_path in $lt_search_path_spec; do -- if test -d "$lt_sys_path$lt_multi_os_dir"; then -- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" -- elif test -n "$lt_multi_os_dir"; then -+ if test -d "$lt_sys_path/$lt_multi_os_dir"; then -+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" -+ else - test -d "$lt_sys_path" && \ - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" - fi - done - lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' --BEGIN {RS = " "; FS = "/|\n";} { -- lt_foo = ""; -- lt_count = 0; -+BEGIN {RS=" "; FS="/|\n";} { -+ lt_foo=""; -+ lt_count=0; - for (lt_i = NF; lt_i > 0; lt_i--) { - if ($lt_i != "" && $lt_i != ".") { - if ($lt_i == "..") { - lt_count++; - } else { - if (lt_count == 0) { -- lt_foo = "/" $lt_i lt_foo; -+ lt_foo="/" $lt_i lt_foo; - } else { - lt_count--; - } -@@ -2352,7 +2219,7 @@ - # for these hosts. - case $host_os in - mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ -- $SED 's|/\([[A-Za-z]]:\)|\1|g'` ;; -+ $SED 's,/\([[A-Za-z]]:\),\1,g'` ;; - esac - sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` - else -@@ -2361,7 +2228,7 @@ - library_names_spec= - libname_spec='lib$name' - soname_spec= --shrext_cmds=.so -+shrext_cmds=".so" - postinstall_cmds= - postuninstall_cmds= - finish_cmds= -@@ -2378,17 +2245,14 @@ - # flags to be left without arguments - need_version=unknown - --AC_ARG_VAR([LT_SYS_LIBRARY_PATH], --[User-defined run-time library search path.]) -- - case $host_os in - aix3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname.a' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. -- soname_spec='$libname$release$shared_ext$major' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - - aix[[4-9]]*) -@@ -2396,91 +2260,41 @@ - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 -- library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with -- # the line '#! .'. This would cause the generated library to -- # depend on '.', always an invalid library. This was fixed in -+ # the line `#! .'. This would cause the generated library to -+ # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. 
- case $host_os in - aix4 | aix4.[[01]] | aix4.[[01]].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' -- echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then -+ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac -- # Using Import Files as archive members, it is possible to support -- # filename-based versioning of shared library archives on AIX. While -- # this would work for both with and without runtime linking, it will -- # prevent static linking of such archives. So we do filename-based -- # shared library versioning with .so extension only, which is used -- # when both runtime linking and shared linking is enabled. -- # Unfortunately, runtime linking may impact performance, so we do -- # not want this to be the default eventually. Also, we use the -- # versioned .so libs for executables only if there is the -brtl -- # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. -- # To allow for filename-based versioning support, we need to create -- # libNAME.so.V as an archive file, containing: -- # *) an Import File, referring to the versioned filename of the -- # archive as well as the shared archive member, telling the -- # bitwidth (32 or 64) of that shared object, and providing the -- # list of exported symbols of that shared object, eventually -- # decorated with the 'weak' keyword -- # *) the shared object with the F_LOADONLY flag set, to really avoid -- # it being seen by the linker. -- # At run time we better use the real file rather than another symlink, -- # but for link time we create the symlink libNAME.so -> libNAME.so.V -- -- case $with_aix_soname,$aix_use_runtimelinking in -- # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct -+ # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. -- aix,yes) # traditional libtool -- dynamic_linker='AIX unversionable lib.so' -+ if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib<name>.so - # instead of lib<name>.a to let people know that these are not - # typical AIX shared libraries. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- ;; -- aix,no) # traditional AIX only -- dynamic_linker='AIX lib.a[(]lib.so.V[)]' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- ;; -- svr4,*) # full svr4 only -- dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)]" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # We do not specify a path in Import Files, so LIBPATH fires. 
-- shlibpath_overrides_runpath=yes -- ;; -- *,yes) # both, prefer svr4 -- dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)], lib.a[(]lib.so.V[)]" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # unpreferred sharedlib libNAME.a needs extra handling -- postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' -- postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,no) # both, prefer aix -- dynamic_linker="AIX lib.a[(]lib.so.V[)], lib.so.V[(]$shared_archive_member_spec.o[)]" -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling -- postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' -- postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' -- ;; -- esac -+ library_names_spec='${libname}${release}.a $libname.a' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ fi - shlibpath_var=LIBPATH - fi - ;; -@@ -2490,18 +2304,18 @@ - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
-- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' -+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - - beos*) -- library_names_spec='$libname$shared_ext' -+ library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; -@@ -2509,8 +2323,8 @@ - bsdi[[45]]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" -@@ -2522,7 +2336,7 @@ - - cygwin* | mingw* | pw32* | cegcc*) - version_type=windows -- shrext_cmds=.dll -+ shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - -@@ -2531,8 +2345,8 @@ - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ -@@ -2548,17 +2362,17 @@ - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' -- soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -+ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - m4_if([$1], [],[ - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"]) - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix -- soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' -- library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' -@@ -2567,8 +2381,8 @@ - *,cl*) - # Native MSVC - libname_spec='$name' -- soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' -- library_names_spec='$libname.dll.lib' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) -@@ -2595,7 +2409,7 @@ - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) -- sys_lib_search_path_spec=$LIB -+ sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -@@ -2608,8 +2422,8 @@ - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' -@@ -2622,7 +2436,7 @@ - - *) - # Assume MSVC wrapper -- library_names_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext $libname.lib' -+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac -@@ -2635,8 +2449,8 @@ - version_type=darwin - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$major$shared_ext' -+ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' -+ soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' -@@ -2649,8 +2463,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -2668,13 +2482,12 @@ - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac -@@ -2699,15 +2512,26 @@ - esac - ;; - -+gnu*) -+ version_type=linux # correct to gnu/linux during the next big refactor -+ need_lib_prefix=no -+ need_version=no -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ shlibpath_var=LD_LIBRARY_PATH -+ shlibpath_overrides_runpath=no -+ hardcode_into_libs=yes -+ ;; -+ - haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH -- shlibpath_overrides_runpath=no -+ shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; -@@ -2725,15 +2549,14 @@ - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -- if test 32 = "$HPUX_IA64_MODE"; then -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux32 - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux64 - fi -+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' -@@ -2741,8 +2564,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; -@@ -2751,8 +2574,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
-@@ -2765,8 +2588,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -2777,7 +2600,7 @@ - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix -@@ -2785,8 +2608,8 @@ - esac - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= -@@ -2805,8 +2628,8 @@ - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no -- sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" -- sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" -+ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" -+ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -@@ -2815,33 +2638,13 @@ - dynamic_linker=no - ;; - --linux*android*) -- version_type=none # Android doesn't support versioned libraries. -- need_lib_prefix=no -- need_version=no -- library_names_spec='$libname$release$shared_ext' -- soname_spec='$libname$release$shared_ext' -- finish_cmds= -- shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -- -- # This implies no fast_install, which is unacceptable. -- # Some rework will be needed to allow for fast_install -- # before this can be enabled. -- hardcode_into_libs=yes -- -- dynamic_linker='Android linker' -- # Don't embed -rpath directories since the linker doesn't support them. -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- ;; -- - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -2866,15 +2669,14 @@ - # before this can be enabled. - hardcode_into_libs=yes - -- # Ideally, we could use ldconfig to report *all* directores which are -- # searched for libraries, however this is still not possible. 
Aside from not -- # being certain /sbin/ldconfig is available, command -- # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, -- # even though it is searched at run-time. Try to do the best guess by -- # appending ld.so.conf contents (and includes) to the search path. -+ # Add ABI-specific directories to the system library path. -+ sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" -+ -+ # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` -- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" -+ sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" -+ - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on -@@ -2891,12 +2693,12 @@ - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH -@@ -2906,7 +2708,7 @@ - - newsos6) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; -@@ -2915,68 +2717,58 @@ - version_type=qnx - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - --openbsd* | bitrig*) -+openbsd*) - version_type=sunos -- sys_lib_dlsearch_path_spec=/usr/lib -+ sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -- need_version=no -- else -- need_version=yes -- fi -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
-+ case $host_os in -+ openbsd3.3 | openbsd3.3.*) need_version=yes ;; -+ *) need_version=no ;; -+ esac -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ case $host_os in -+ openbsd2.[[89]] | openbsd2.[[89]].*) -+ shlibpath_overrides_runpath=no -+ ;; -+ *) -+ shlibpath_overrides_runpath=yes -+ ;; -+ esac -+ else -+ shlibpath_overrides_runpath=yes -+ fi - ;; - - os2*) - libname_spec='$name' -- version_type=windows -- shrext_cmds=.dll -- need_version=no -+ shrext_cmds=".dll" - need_lib_prefix=no -- # OS/2 can only load a DLL with a base name of 8 characters or less. -- soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; -- v=$($ECHO $release$versuffix | tr -d .-); -- n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); -- $ECHO $n$v`$shared_ext' -- library_names_spec='${libname}_dll.$libext' -+ library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' -- shlibpath_var=BEGINLIBPATH -- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ -- dldir=$destdir/`dirname \$dlpath`~ -- test -d \$dldir || mkdir -p \$dldir~ -- $install_prog $dir/$dlname \$dldir/$dlname~ -- chmod a+x \$dldir/$dlname~ -- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then -- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; -- fi' -- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ -- dlpath=$dir/\$dldll~ -- $RM \$dlpath' -+ shlibpath_var=LIBPATH - ;; - - osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -+ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - - rdos*) -@@ -2987,8 +2779,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -@@ -2998,11 +2790,11 @@ - - sunos4*) - version_type=sunos -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes -@@ -3010,8 +2802,8 @@ - - sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) -@@ -3032,24 +2824,24 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' -- soname_spec='$libname$shared_ext.$major' -+ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' -+ soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) -- version_type=sco -+ version_type=freebsd-elf - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -- if test yes = 
"$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' -@@ -3067,7 +2859,7 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes -@@ -3075,8 +2867,8 @@ - - uts4*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -3085,30 +2877,20 @@ - ;; - esac - AC_MSG_RESULT([$dynamic_linker]) --test no = "$dynamic_linker" && can_build_shared=no -+test "$dynamic_linker" = no && can_build_shared=no - - variables_saved_for_relink="PATH $shlibpath_var $runpath_var" --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" - fi - --if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then -- sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec -+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then -+ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" - fi -- --if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then -- sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec -+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then -+ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" - fi - --# remember unaugmented sys_lib_dlsearch_path content for libtool script decls... --configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec -- --# ... 
but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code --func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" -- --# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool --configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH -- - _LT_DECL([], [variables_saved_for_relink], [1], - [Variables whose values should be saved in libtool wrapper scripts and - restored at link time]) -@@ -3141,41 +2923,39 @@ - [Whether we should hardcode library paths into libraries]) - _LT_DECL([], [sys_lib_search_path_spec], [2], - [Compile-time system search path for libraries]) --_LT_DECL([sys_lib_dlsearch_path_spec], [configure_time_dlsearch_path], [2], -- [Detected run-time system search path for libraries]) --_LT_DECL([], [configure_time_lt_sys_library_path], [2], -- [Explicit LT_SYS_LIBRARY_PATH set during ./configure time]) -+_LT_DECL([], [sys_lib_dlsearch_path_spec], [2], -+ [Run-time system search path for libraries]) - ])# _LT_SYS_DYNAMIC_LINKER - - - # _LT_PATH_TOOL_PREFIX(TOOL) - # -------------------------- --# find a file program that can recognize shared library -+# find a file program which can recognize shared library - AC_DEFUN([_LT_PATH_TOOL_PREFIX], - [m4_require([_LT_DECL_EGREP])dnl - AC_MSG_CHECKING([for $1]) - AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, - [case $MAGIC_CMD in - [[\\/*] | ?:[\\/]*]) -- lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. -+ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; - *) -- lt_save_MAGIC_CMD=$MAGIC_CMD -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_MAGIC_CMD="$MAGIC_CMD" -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - dnl $ac_dummy forces splitting on constant user-supplied paths. - dnl POSIX.2 word splitting is done only on the output of word expansions, - dnl not every word. This closes a longstanding sh security hole. - ac_dummy="m4_if([$2], , $PATH, [$2])" - for ac_dir in $ac_dummy; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- if test -f "$ac_dir/$1"; then -- lt_cv_path_MAGIC_CMD=$ac_dir/"$1" -+ if test -f $ac_dir/$1; then -+ lt_cv_path_MAGIC_CMD="$ac_dir/$1" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` -- MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : -@@ -3198,11 +2978,11 @@ - break - fi - done -- IFS=$lt_save_ifs -- MAGIC_CMD=$lt_save_MAGIC_CMD -+ IFS="$lt_save_ifs" -+ MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; - esac]) --MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if test -n "$MAGIC_CMD"; then - AC_MSG_RESULT($MAGIC_CMD) - else -@@ -3220,7 +3000,7 @@ - - # _LT_PATH_MAGIC - # -------------- --# find a file program that can recognize a shared library -+# find a file program which can recognize a shared library - m4_defun([_LT_PATH_MAGIC], - [_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) - if test -z "$lt_cv_path_MAGIC_CMD"; then -@@ -3247,16 +3027,16 @@ - AC_ARG_WITH([gnu-ld], - [AS_HELP_STRING([--with-gnu-ld], - [assume the C compiler uses GNU ld @<:@default=no@:>@])], -- [test no = "$withval" || with_gnu_ld=yes], -+ [test "$withval" = no || with_gnu_ld=yes], - [with_gnu_ld=no])dnl - - ac_prog=ld --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - # Check if gcc -print-prog-name=ld gives a path. 
- AC_MSG_CHECKING([for ld used by $CC]) - case $host in - *-*-mingw*) -- # gcc leaves a trailing carriage return, which upsets mingw -+ # gcc leaves a trailing carriage return which upsets mingw - ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; - *) - ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; -@@ -3270,7 +3050,7 @@ - while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do - ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` - done -- test -z "$LD" && LD=$ac_prog -+ test -z "$LD" && LD="$ac_prog" - ;; - "") - # If it fails, then pretend we aren't using GCC. -@@ -3281,37 +3061,37 @@ - with_gnu_ld=unknown - ;; - esac --elif test yes = "$with_gnu_ld"; then -+elif test "$with_gnu_ld" = yes; then - AC_MSG_CHECKING([for GNU ld]) - else - AC_MSG_CHECKING([for non-GNU ld]) - fi - AC_CACHE_VAL(lt_cv_path_LD, - [if test -z "$LD"; then -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then -- lt_cv_path_LD=$ac_dir/$ac_prog -+ lt_cv_path_LD="$ac_dir/$ac_prog" - # Check to see if the program is GNU ld. I'd rather use --version, - # but apparently some variants of GNU ld only accept -v. - # Break only if it was the GNU/non-GNU ld that we prefer. - case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in - *GNU* | *'with BFD'*) -- test no != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != no && break - ;; - *) -- test yes != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != yes && break - ;; - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - else -- lt_cv_path_LD=$LD # Let the user override the test with a path. -+ lt_cv_path_LD="$LD" # Let the user override the test with a path. 
- fi]) --LD=$lt_cv_path_LD -+LD="$lt_cv_path_LD" - if test -n "$LD"; then - AC_MSG_RESULT($LD) - else -@@ -3365,13 +3145,13 @@ - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - reload_cmds=false - fi - ;; - darwin*) -- if test yes = "$GCC"; then -- reload_cmds='$LTCC $LTCFLAGS -nostdlib $wl-r -o $output$reload_objs' -+ if test "$GCC" = yes; then -+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' - else - reload_cmds='$LD$reload_flag -o $output$reload_objs' - fi -@@ -3382,43 +3162,6 @@ - ])# _LT_CMD_RELOAD - - --# _LT_PATH_DD --# ----------- --# find a working dd --m4_defun([_LT_PATH_DD], --[AC_CACHE_CHECK([for a working dd], [ac_cv_path_lt_DD], --[printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --: ${lt_DD:=$DD} --AC_PATH_PROGS_FEATURE_CHECK([lt_DD], [dd], --[if "$ac_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: --fi]) --rm -f conftest.i conftest2.i conftest.out]) --])# _LT_PATH_DD -- -- --# _LT_CMD_TRUNCATE --# ---------------- --# find command to truncate a binary pipe --m4_defun([_LT_CMD_TRUNCATE], --[m4_require([_LT_PATH_DD]) --AC_CACHE_CHECK([how to truncate binary pipes], [lt_cv_truncate_bin], --[printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --lt_cv_truncate_bin= --if "$ac_cv_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" --fi --rm -f conftest.i conftest2.i conftest.out --test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q"]) --_LT_DECL([lt_truncate_bin], [lt_cv_truncate_bin], [1], -- [Command to truncate a binary pipe]) --])# _LT_CMD_TRUNCATE -- -- - # _LT_CHECK_MAGIC_METHOD - # ---------------------- - # how to check for library dependencies -@@ -3434,13 +3177,13 @@ - # Need to set the preceding variable on all platforms that support - # interlibrary dependencies. - # 'none' -- dependencies not supported. --# 'unknown' -- same as none, but documents that we really don't know. -+# `unknown' -- same as none, but documents that we really don't know. - # 'pass_all' -- all dependencies passed with no checks. - # 'test_compile' -- check by making test program. - # 'file_magic [[regex]]' -- check by looking for files in library path --# that responds to the $file_magic_cmd with a given extended regex. --# If you have 'file' or equivalent on your system and you're not sure --# whether 'pass_all' will *always* work, you probably want this one. -+# which responds to the $file_magic_cmd with a given extended regex. -+# If you have `file' or equivalent on your system and you're not sure -+# whether `pass_all' will *always* work, you probably want this one. - - case $host_os in - aix[[4-9]]*) -@@ -3467,7 +3210,8 @@ - # Base MSYS/MinGW do not provide the 'file' command needed by - # func_win32_libid shell function, so use a weaker test based on 'objdump', - # unless we find 'file', for example because we are cross-compiling. -- if ( file / ) >/dev/null 2>&1; then -+ # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. 
-+ if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -@@ -3503,6 +3247,10 @@ - fi - ;; - -+gnu*) -+ lt_cv_deplibs_check_method=pass_all -+ ;; -+ - haiku*) - lt_cv_deplibs_check_method=pass_all - ;; -@@ -3541,7 +3289,7 @@ - ;; - - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -@@ -3563,8 +3311,8 @@ - lt_cv_deplibs_check_method=pass_all - ;; - --openbsd* | bitrig*) -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+openbsd*) -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' -@@ -3617,9 +3365,6 @@ - tpf*) - lt_cv_deplibs_check_method=pass_all - ;; --os2*) -- lt_cv_deplibs_check_method=pass_all -- ;; - esac - ]) - -@@ -3660,38 +3405,33 @@ - AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM, - [if test -n "$NM"; then - # Let the user override the test. -- lt_cv_path_NM=$NM -+ lt_cv_path_NM="$NM" - else -- lt_nm_to_check=${ac_tool_prefix}nm -+ lt_nm_to_check="${ac_tool_prefix}nm" - if test -n "$ac_tool_prefix" && test "$build" = "$host"; then - lt_nm_to_check="$lt_nm_to_check nm" - fi - for lt_tmp_nm in $lt_nm_to_check; do -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- tmp_nm=$ac_dir/$lt_tmp_nm -- if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then -+ tmp_nm="$ac_dir/$lt_tmp_nm" -+ if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then - # Check to see if the nm accepts a BSD-compat flag. -- # Adding the 'sed 1q' prevents false positives on HP-UX, which says: -+ # Adding the `sed 1q' prevents false positives on HP-UX, which says: - # nm: unknown option "B" ignored - # Tru64's nm complains that /dev/null is an invalid object file -- # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty -- case $build_os in -- mingw*) lt_bad_file=conftest.nm/nofile ;; -- *) lt_bad_file=/dev/null ;; -- esac -- case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in -- *$lt_bad_file* | *'Invalid file or object type'*) -+ case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in -+ */dev/null* | *'Invalid file or object type'*) - lt_cv_path_NM="$tmp_nm -B" -- break 2 -+ break - ;; - *) - case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in - */dev/null*) - lt_cv_path_NM="$tmp_nm -p" -- break 2 -+ break - ;; - *) - lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but -@@ -3702,21 +3442,21 @@ - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - done - : ${lt_cv_path_NM=no} - fi]) --if test no != "$lt_cv_path_NM"; then -- NM=$lt_cv_path_NM -+if test "$lt_cv_path_NM" != "no"; then -+ NM="$lt_cv_path_NM" - else - # Didn't find any BSD compatible name lister, look for dumpbin. - if test -n "$DUMPBIN"; then : - # Let the user override the test. 
- else - AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :) -- case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in -+ case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in - *COFF*) -- DUMPBIN="$DUMPBIN -symbols -headers" -+ DUMPBIN="$DUMPBIN -symbols" - ;; - *) - DUMPBIN=: -@@ -3724,8 +3464,8 @@ - esac - fi - AC_SUBST([DUMPBIN]) -- if test : != "$DUMPBIN"; then -- NM=$DUMPBIN -+ if test "$DUMPBIN" != ":"; then -+ NM="$DUMPBIN" - fi - fi - test -z "$NM" && NM=nm -@@ -3771,8 +3511,8 @@ - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- # two different shell functions defined in ltmain.sh; -- # decide which one to use based on capabilities of $DLLTOOL -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL - case `$DLLTOOL --help 2>&1` in - *--identify-strict*) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -@@ -3784,7 +3524,7 @@ - ;; - *) - # fallback: assume linklib IS sharedlib -- lt_cv_sharedlib_from_linklib_cmd=$ECHO -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" - ;; - esac - ]) -@@ -3811,28 +3551,13 @@ - lt_cv_path_mainfest_tool=yes - fi - rm -f conftest*]) --if test yes != "$lt_cv_path_mainfest_tool"; then -+if test "x$lt_cv_path_mainfest_tool" != xyes; then - MANIFEST_TOOL=: - fi - _LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl - ])# _LT_PATH_MANIFEST_TOOL - - --# _LT_DLL_DEF_P([FILE]) --# --------------------- --# True iff FILE is a Windows DLL '.def' file. --# Keep in sync with func_dll_def_p in the libtool script --AC_DEFUN([_LT_DLL_DEF_P], --[dnl -- test DEF = "`$SED -n dnl -- -e '\''s/^[[ ]]*//'\'' dnl Strip leading whitespace -- -e '\''/^\(;.*\)*$/d'\'' dnl Delete empty lines and comments -- -e '\''s/^\(EXPORTS\|LIBRARY\)\([[ ]].*\)*$/DEF/p'\'' dnl -- -e q dnl Only consider the first "real" line -- $1`" dnl --])# _LT_DLL_DEF_P -- -- - # LT_LIB_M - # -------- - # check for math library -@@ -3844,11 +3569,11 @@ - # These system don't have libm, or don't need it - ;; - *-ncr-sysv4.3*) -- AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM=-lmw) -+ AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw") - AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") - ;; - *) -- AC_CHECK_LIB(m, cos, LIBM=-lm) -+ AC_CHECK_LIB(m, cos, LIBM="-lm") - ;; - esac - AC_SUBST([LIBM]) -@@ -3867,7 +3592,7 @@ - - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= - --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $cc_basename in - nvcc*) - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;; -@@ -3919,7 +3644,7 @@ - symcode='[[ABCDGISTW]]' - ;; - hpux*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - symcode='[[ABCDEGRST]]' - fi - ;; -@@ -3952,44 +3677,14 @@ - symcode='[[ABCDGIRSTW]]' ;; - esac - --if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Gets list of data symbols to import. -- lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" -- # Adjust the below global symbol transforms to fixup imported variables. -- lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" -- lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" -- lt_c_name_lib_hook="\ -- -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -- -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" --else -- # Disable hooks by default. -- lt_cv_sys_global_symbol_to_import= -- lt_cdecl_hook= -- lt_c_name_hook= -- lt_c_name_lib_hook= --fi -- - # Transform an extracted symbol line into a proper C declaration. - # Some systems (esp. 
on ia64) link data and code symbols differently, - # so use this general approach. --lt_cv_sys_global_symbol_to_cdecl="sed -n"\ --$lt_cdecl_hook\ --" -e 's/^T .* \(.*\)$/extern int \1();/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" -+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ --$lt_c_name_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" -- --# Transform an extracted symbol line into symbol name with lib prefix and --# symbol address. --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ --$lt_c_name_lib_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -4007,24 +3702,21 @@ - - # Write the raw and C identifiers. - if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Fake it for dumpbin and say T for any non-static function, -- # D for any global variable and I for any imported variable. -+ # Fake it for dumpbin and say T for any non-static function -+ # and D for any global variable. - # Also find C++ and __fastcall symbols from MSVC++, - # which start with @ or ?. - lt_cv_sys_global_symbol_pipe="$AWK ['"\ - " {last_section=section; section=\$ 3};"\ - " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ - " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ --" /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ --" /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ --" /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ - " \$ 0!~/External *\|/{next};"\ - " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ - " {if(hide[section]) next};"\ --" {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ --" {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ --" s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ --" s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ -+" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ -+" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ -+" s[1]~/^[@?]/{print s[1], s[1]; next};"\ -+" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ - " ' prfx=^$ac_symprfx]" - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" -@@ -4064,11 +3756,11 @@ - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext - /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ --#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE --/* DATA imports from DLLs on WIN32 can't be const, because runtime -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. */ - # define LT@&t@_DLSYM_CONST --#elif defined __osf__ -+#elif defined(__osf__) - /* This system does not cope well with relocations in const data. */ - # define LT@&t@_DLSYM_CONST - #else -@@ -4094,7 +3786,7 @@ - { - { "@PROGRAM@", (void *) 0 }, - _LT_EOF -- $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext -+ $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext - cat <<\_LT_EOF >> conftest.$ac_ext - {0, (void *) 0} - }; -@@ -4114,9 +3806,9 @@ - mv conftest.$ac_objext conftstm.$ac_objext - lt_globsym_save_LIBS=$LIBS - lt_globsym_save_CFLAGS=$CFLAGS -- LIBS=conftstm.$ac_objext -+ LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" -- if AC_TRY_EVAL(ac_link) && test -s conftest$ac_exeext; then -+ if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then - pipe_works=yes - fi - LIBS=$lt_globsym_save_LIBS -@@ -4137,7 +3829,7 @@ - rm -rf conftest* conftst* - - # Do not use the global_symbol_pipe unless it works. -- if test yes = "$pipe_works"; then -+ if test "$pipe_works" = yes; then - break - else - lt_cv_sys_global_symbol_pipe= -@@ -4164,16 +3856,12 @@ - [Take the output of nm and produce a listing of raw symbols and C names]) - _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], - [Transform the output of nm in a proper C declaration]) --_LT_DECL([global_symbol_to_import], [lt_cv_sys_global_symbol_to_import], [1], -- [Transform the output of nm into a list of symbols to manually relocate]) - _LT_DECL([global_symbol_to_c_name_address], - [lt_cv_sys_global_symbol_to_c_name_address], [1], - [Transform the output of nm in a C name address pair]) - _LT_DECL([global_symbol_to_c_name_address_lib_prefix], - [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], - [Transform the output of nm in a C name address pair when lib prefix is needed]) --_LT_DECL([nm_interface], [lt_cv_nm_interface], [1], -- [The name lister interface]) - _LT_DECL([], [nm_file_list_spec], [1], - [Specify filename containing input files for $NM]) - ]) # _LT_CMD_GLOBAL_SYMBOLS -@@ -4189,18 +3877,17 @@ - - m4_if([$1], [CXX], [ - # C++ specific cases for pic, static, wl, etc. -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - - amigaos*) -@@ -4211,8 +3898,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. 
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -4228,11 +3915,6 @@ - # (--disable-auto-import) libraries - m4_if([$1], [GCJ], [], - [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) -- case $host_os in -- os2*) -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' -- ;; -- esac - ;; - darwin* | rhapsody*) - # PIC is the default on this platform -@@ -4282,7 +3964,7 @@ - case $host_os in - aix[[4-9]]*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - else -@@ -4323,14 +4005,14 @@ - case $cc_basename in - CC*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' -- if test ia64 != "$host_cpu"; then -+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' -+ if test "$host_cpu" != ia64; then - _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' - fi - ;; - aCC*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' -+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' - case $host_cpu in - hppa*64*|ia64*) - # +Z the default -@@ -4359,7 +4041,7 @@ - ;; - esac - ;; -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - KCC*) - # KAI C++ Compiler -@@ -4367,7 +4049,7 @@ - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - ecpc* ) -- # old Intel C++ for x86_64, which still supported -KPIC. -+ # old Intel C++ for x86_64 which still supported -KPIC. - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' -@@ -4512,18 +4194,17 @@ - fi - ], - [ -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - - amigaos*) -@@ -4534,8 +4215,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. 
- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -4552,11 +4233,6 @@ - # (--disable-auto-import) libraries - m4_if([$1], [GCJ], [], - [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) -- case $host_os in -- os2*) -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' -- ;; -- esac - ;; - - darwin* | rhapsody*) -@@ -4627,7 +4303,7 @@ - case $host_os in - aix*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - else -@@ -4635,30 +4311,11 @@ - fi - ;; - -- darwin* | rhapsody*) -- # PIC is the default on this platform -- # Common symbols not allowed in MH_DYLIB files -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' -- case $cc_basename in -- nagfor*) -- # NAG Fortran compiler -- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' -- ;; -- esac -- ;; -- - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - m4_if([$1], [GCJ], [], - [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) -- case $host_os in -- os2*) -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' -- ;; -- esac - ;; - - hpux9* | hpux10* | hpux11*) -@@ -4674,7 +4331,7 @@ - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? -- _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' -+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) -@@ -4683,9 +4340,9 @@ - _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in -- # old Intel for x86_64, which still supported -KPIC. -+ # old Intel for x86_64 which still supported -KPIC. - ecc*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' -@@ -4710,12 +4367,6 @@ - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' -- _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' -- _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' -- ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -4813,7 +4464,7 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi -@@ -4842,7 +4493,7 @@ - fi - ]) - case $host_os in -- # For platforms that do not support PIC, -DPIC is meaningless: -+ # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - _LT_TAGVAR(lt_prog_compiler_pic, $1)= - ;; -@@ -4908,21 +4559,17 @@ - case $host_os in - aix[[4-9]]*) - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. -- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". 
-- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global defined -+ # symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - ;; - pw32*) -- _LT_TAGVAR(export_symbols_cmds, $1)=$ltdll_cmds -+ _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" - ;; - cygwin* | mingw* | cegcc*) - case $cc_basename in -@@ -4968,9 +4615,9 @@ - # included in the symbol list - _LT_TAGVAR(include_expsyms, $1)= - # exclude_expsyms can be an extended regexp of symbols to exclude -- # it will be wrapped by ' (' and ')$', so one must not match beginning or -- # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', -- # as well as any symbol that contains 'd'. -+ # it will be wrapped by ` (' and `)$', so one must not match beginning or -+ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', -+ # as well as any symbol that contains `d'. - _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if -@@ -4986,7 +4633,7 @@ - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; -@@ -4994,7 +4641,7 @@ - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; -- openbsd* | bitrig*) -+ openbsd*) - with_gnu_ld=no - ;; - esac -@@ -5004,7 +4651,7 @@ - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. 
- lt_use_gnu_ld_interface=no -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility -@@ -5026,24 +4673,24 @@ - esac - fi - -- if test yes = "$lt_use_gnu_ld_interface"; then -+ if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty -- wlarc='$wl' -+ wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - _LT_TAGVAR(whole_archive_flag_spec, $1)= - fi - supports_anon_versioning=no -- case `$LD -v | $SED -e 's/([^)]\+)\s\+//' 2>&1` in -+ case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... -@@ -5056,7 +4703,7 @@ - case $host_os in - aix[[3-9]]*) - # On AIX/PPC, the GNU linker is very broken -- if test ia64 != "$host_cpu"; then -+ if test "$host_cpu" != ia64; then - _LT_TAGVAR(ld_shlibs, $1)=no - cat <<_LT_EOF 1>&2 - -@@ -5075,7 +4722,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='' - ;; - m68k) -@@ -5091,7 +4738,7 @@ - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -5101,7 +4748,7 @@ - # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, - # as there is no search path for DLLs. 
- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_TAGVAR(always_export_symbols, $1)=no - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -@@ -5109,89 +4756,61 @@ - _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... -- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - haiku*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - -- os2*) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(hardcode_minus_L, $1)=yes -- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- shrext_cmds=.dll -- _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | 
$prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -- ;; -- - interix[[3-9]]*) - _LT_TAGVAR(hardcode_direct, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no -- if test linux-dietlibc = "$host_os"; then -+ if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ -- && test no = "$tmp_diet" -+ && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in 
$convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; -@@ -5202,47 +4821,42 @@ - lf95*) # Lahey Fortran 8.1 - _LT_TAGVAR(whole_archive_flag_spec, $1)= - tmp_sharedflag='--shared' ;; -- nagfor*) # NAGFOR 5.3 -- tmp_sharedflag='-Wl,-shared' ;; - xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - _LT_TAGVAR(compiler_needs_object, $1)=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - _LT_TAGVAR(compiler_needs_object, $1)=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac -- _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in -- tcc*) -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='-rdynamic' -- ;; - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - _LT_TAGVAR(archive_expsym_cmds, 
$1)='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -5256,8 +4870,8 @@ - _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -5275,8 +4889,8 @@ - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -5288,7 +4902,7 @@ - _LT_TAGVAR(ld_shlibs, $1)=no - cat <<_LT_EOF 1>&2 - --*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot -+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not - *** reliably create shared libraries on SCO systems. Therefore, libtool - *** is disabling shared libraries support. We urge you to upgrade GNU - *** binutils to release 2.16.91.0.3 or newer. Another option is to modify -@@ -5303,9 +4917,9 @@ - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. 
- if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -5322,15 +4936,15 @@ - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - -- if test no = "$_LT_TAGVAR(ld_shlibs, $1)"; then -+ if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then - runpath_var= - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= - _LT_TAGVAR(export_dynamic_flag_spec, $1)= -@@ -5346,7 +4960,7 @@ - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. - _LT_TAGVAR(hardcode_minus_L, $1)=yes -- if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then -+ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - _LT_TAGVAR(hardcode_direct, $1)=unsupported -@@ -5354,57 +4968,34 @@ - ;; - - aix[[4-9]]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. -- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". -- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global -+ # defined symbols, whereas GNU nm marks them as "W". 
- if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. - case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) - for ld_flag in $LDFLAGS; do -- if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then -+ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -5423,21 +5014,13 @@ - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(file_list_spec, $1)='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # traditional, no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. 
-- _LT_TAGVAR(hardcode_direct, $1)=no -- _LT_TAGVAR(hardcode_direct_absolute, $1)=no -- ;; -- esac -+ _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - case $host_os in aix4.[[012]]|aix4.[[012]].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -5456,80 +5039,61 @@ - ;; - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag="$shared_flag "'$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - _LT_TAGVAR(always_export_symbols, $1)=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - _LT_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
- _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' - _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" -- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. - _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)=yes -- _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared libraries. 
-- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared libraries. -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -5538,7 +5102,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='' - ;; - m68k) -@@ -5568,17 +5132,16 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -@@ -5587,18 +5150,18 @@ - # Don't use ranlib - _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' - _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # Assume MSVC wrapper -@@ -5607,7 +5170,7 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. 
-@@ -5657,33 +5220,33 @@ - ;; - - hpux9*) -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(hardcode_direct, $1)=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - _LT_TAGVAR(hardcode_minus_L, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - ;; - - hpux10*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- _LT_TAGVAR(hardcode_minus_L, $1)=yes -@@ -5691,25 +5254,25 @@ - ;; - - hpux11*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - m4_if($1, [], [ -@@ -5717,14 +5280,14 @@ - # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) - _LT_LINKER_OPTION([if $CC understands -b], - _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b], -- [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'], -+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'], - [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])], -- [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags']) -+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags']) - ;; - esac - fi -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - case $host_cpu in -@@ -5735,7 +5298,7 @@ - *) - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
-@@ -5746,16 +5309,16 @@ - ;; - - irix5* | irix6* | nonstopux*) -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. - # This should be the same for all languages, so no per-tag cache variable. - AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], - [lt_cv_irix_exported_symbol], -- [save_LDFLAGS=$LDFLAGS -- LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" -+ [save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - AC_LINK_IFELSE( - [AC_LANG_SOURCE( - [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], -@@ -5768,31 +5331,21 @@ - end]])])], - [lt_cv_irix_exported_symbol=yes], - [lt_cv_irix_exported_symbol=no]) -- LDFLAGS=$save_LDFLAGS]) -- if test yes = "$lt_cv_irix_exported_symbol"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' -+ LDFLAGS="$save_LDFLAGS"]) -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)='no' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(inherit_rpath, $1)=yes - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - -- linux*) -- case $cc_basename in -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- _LT_TAGVAR(ld_shlibs, $1)=yes -- 
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- ;; -- esac -- ;; -- - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out -@@ -5807,7 +5360,7 @@ - newsos6) - _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_TAGVAR(hardcode_direct, $1)=yes -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; -@@ -5815,19 +5368,27 @@ - *nto* | *qnx*) - ;; - -- openbsd* | bitrig*) -+ openbsd*) - if test -f /usr/libexec/ld.so; then - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -+ case $host_os in -+ openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*) -+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' -+ ;; -+ *) -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ ;; -+ esac - fi - else - _LT_TAGVAR(ld_shlibs, $1)=no -@@ -5838,53 +5399,33 @@ - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_TAGVAR(hardcode_minus_L, $1)=yes - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- shrext_cmds=.dll -- _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ 
-- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -+ _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' -+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) -- if test yes = "$GCC"; then -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)='no' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag -- if test yes = "$GCC"; then -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - else - 
_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' -+ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' -@@ -5895,24 +5436,24 @@ - - solaris*) - _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' -- if test yes = "$GCC"; then -- wlarc='$wl' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ wlarc='${wl}' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' -- _LT_TAGVAR(archive_cmds, $1)='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' -+ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) -- wlarc='$wl' -- _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ wlarc='${wl}' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi -@@ -5922,11 +5463,11 @@ - 
solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. GCC discards it without '$wl', -+ # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) -- if test yes = "$GCC"; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' - fi -@@ -5936,10 +5477,10 @@ - ;; - - sunos4*) -- if test sequent = "$host_vendor"; then -+ if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -5988,43 +5529,43 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) -- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -- _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' -+ _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - -@@ -6039,17 +5580,17 @@ - ;; - esac - -- if test sni = "$host_vendor"; then -+ if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Blargedynsym' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym' - ;; - esac - fi - fi - ]) - AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) --test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no -+test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no - - _LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld - -@@ -6066,7 +5607,7 @@ - # Assume -lc should be added - _LT_TAGVAR(archive_cmds_need_lc, $1)=yes - -- if test yes,yes = "$GCC,$enable_shared"; then -+ if test "$enable_shared" = yes && test "$GCC" = yes; then - case $_LT_TAGVAR(archive_cmds, $1) in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. 
-@@ -6146,12 +5687,12 @@ - _LT_TAGDECL([], [hardcode_libdir_separator], [1], - [Whether we need a single "-rpath" flag with a separated argument]) - _LT_TAGDECL([], [hardcode_direct], [0], -- [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes -+ [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes - DIR into the resulting binary]) - _LT_TAGDECL([], [hardcode_direct_absolute], [0], -- [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes -+ [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes - DIR into the resulting binary and the resulting library dependency is -- "absolute", i.e impossible to change by setting $shlibpath_var if the -+ "absolute", i.e impossible to change by setting ${shlibpath_var} if the - library is relocated]) - _LT_TAGDECL([], [hardcode_minus_L], [0], - [Set to "yes" if using the -LDIR flag during linking hardcodes DIR -@@ -6192,10 +5733,10 @@ - # ------------------------ - # Ensure that the configuration variables for a C compiler are suitably - # defined. These variables are subsequently used by _LT_CONFIG to write --# the compiler configuration to 'libtool'. -+# the compiler configuration to `libtool'. - m4_defun([_LT_LANG_C_CONFIG], - [m4_require([_LT_DECL_EGREP])dnl --lt_save_CC=$CC -+lt_save_CC="$CC" - AC_LANG_PUSH(C) - - # Source file extension for C test sources. -@@ -6235,18 +5776,18 @@ - LT_SYS_DLOPEN_SELF - _LT_CMD_STRIPLIB - -- # Report what library types will actually be built -+ # Report which library types will actually be built - AC_MSG_CHECKING([if libtool supports shared libraries]) - AC_MSG_RESULT([$can_build_shared]) - - AC_MSG_CHECKING([whether to build shared libraries]) -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' -@@ -6254,12 +5795,8 @@ - ;; - - aix[[4-9]]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -6267,13 +5804,13 @@ - - AC_MSG_CHECKING([whether to build static libraries]) - # Make sure either enable_shared or enable_static is yes. -- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - AC_MSG_RESULT([$enable_static]) - - _LT_CONFIG($1) - fi - AC_LANG_POP --CC=$lt_save_CC -+CC="$lt_save_CC" - ])# _LT_LANG_C_CONFIG - - -@@ -6281,14 +5818,14 @@ - # -------------------------- - # Ensure that the configuration variables for a C++ compiler are suitably - # defined. These variables are subsequently used by _LT_CONFIG to write --# the compiler configuration to 'libtool'. -+# the compiler configuration to `libtool'. 
- m4_defun([_LT_LANG_CXX_CONFIG], - [m4_require([_LT_FILEUTILS_DEFAULTS])dnl - m4_require([_LT_DECL_EGREP])dnl - m4_require([_LT_PATH_MANIFEST_TOOL])dnl --if test -n "$CXX" && ( test no != "$CXX" && -- ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) || -- (test g++ != "$CXX"))); then -+if test -n "$CXX" && ( test "X$CXX" != "Xno" && -+ ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || -+ (test "X$CXX" != "Xg++"))) ; then - AC_PROG_CXXCPP - else - _lt_caught_CXX_error=yes -@@ -6330,7 +5867,7 @@ - # the CXX compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_caught_CXX_error"; then -+if test "$_lt_caught_CXX_error" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="int some_variable = 0;" - -@@ -6372,35 +5909,35 @@ - if test -n "$compiler"; then - # We don't want -fno-exception when compiling C++ code, so set the - # no_builtin_flag separately -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' - else - _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= - fi - -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - # Set up default GNU C++ configuration - - LT_PATH_LD - - # Check if GNU C++ uses GNU ld as the underlying linker, since the - # archiving commands below assume that GNU ld is being used. -- if test yes = "$with_gnu_ld"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ if test "$with_gnu_ld" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - - # If archive_cmds runs LD, not CC, wlarc should be empty - # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to - # investigate it a little bit more. (MM) -- wlarc='$wl' -+ wlarc='${wl}' - - # ancient GNU ld didn't support --whole-archive et. al. - if eval "`$CC -print-prog-name=ld` --help 2>&1" | - $GREP 'no-whole-archive' > /dev/null; then -- _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - _LT_TAGVAR(whole_archive_flag_spec, $1)= - fi -@@ -6436,30 +5973,18 @@ - _LT_TAGVAR(ld_shlibs, $1)=no - ;; - aix[[4-9]]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. 
- aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. - case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) - for ld_flag in $LDFLAGS; do - case $ld_flag in -@@ -6469,13 +5994,6 @@ - ;; - esac - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -6494,21 +6012,13 @@ - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(file_list_spec, $1)='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. -- _LT_TAGVAR(hardcode_direct, $1)=no -- _LT_TAGVAR(hardcode_direct_absolute, $1)=no -- ;; -- esac -+ _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' - -- if test yes = "$GXX"; then -+ if test "$GXX" = yes; then - case $host_os in aix4.[[012]]|aix4.[[012]].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -6526,84 +6036,64 @@ - fi - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag=$shared_flag' $wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. 
The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to - # export. - _LT_TAGVAR(always_export_symbols, $1)=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. -- # The "-G" linker flag allows undefined symbols. -- _LT_TAGVAR(no_undefined_flag, $1)='-bernotok' -+ _LT_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an empty - # executable. - _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' - _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" -- _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. - _LT_SYS_MODULE_PATH_AIX([$1]) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. 
-- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' - fi - _LT_TAGVAR(archive_cmds_need_lc, $1)=yes -- _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared -- # libraries. Need -bnortl late, we may have -brtl in LDFLAGS. -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared -+ # libraries. -+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -6613,7 +6103,7 @@ - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -6641,58 +6131,57 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - # Don't use ranlib - _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' - _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- func_to_tool_file "$lt_outputfile"~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ func_to_tool_file "$lt_outputfile"~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # g++ - # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, - # as there is no search path for DLLs. - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_TAGVAR(always_export_symbols, $1)=no - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... 
-- _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -6703,34 +6192,6 @@ - _LT_DARWIN_LINKER_FEATURES($1) - ;; - -- os2*) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(hardcode_minus_L, $1)=yes -- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- shrext_cmds=.dll -- _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -- ;; -- - dgux*) - case $cc_basename in - ec++*) -@@ -6765,15 +6226,18 @@ - _LT_TAGVAR(ld_shlibs, $1)=yes - ;; - -+ gnu*) -+ ;; -+ - haiku*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - - hpux9*) -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - 
_LT_TAGVAR(hardcode_libdir_separator, $1)=: -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, - # but as the default -@@ -6785,7 +6249,7 @@ - _LT_TAGVAR(ld_shlibs, $1)=no - ;; - aCC*) -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. -@@ -6794,11 +6258,11 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - ;; - *) -- if test yes = "$GXX"; then -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GXX" = yes; then -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - # FIXME: insert proper C++ library support - _LT_TAGVAR(ld_shlibs, $1)=no -@@ -6808,15 +6272,15 @@ - ;; - - hpux10*|hpux11*) -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' -+ if test $with_gnu_ld = no; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - case $host_cpu in - hppa*64*|ia64*) - ;; - *) -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - ;; - esac - fi -@@ -6842,13 +6306,13 @@ - aCC*) - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - # Commands to make compiler produce verbose output that lists -@@ -6859,20 +6323,20 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - ;; - *) -- if test yes = "$GXX"; then -- if test no = "$with_gnu_ld"; then -+ if test "$GXX" = yes; then -+ if test $with_gnu_ld = no; then - case $host_cpu in - hppa*64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - fi -@@ -6887,22 +6351,22 @@ - interix[[3-9]]*) - _LT_TAGVAR(hardcode_direct, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
-- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - irix5* | irix6*) - case $cc_basename in - CC*) - # SGI C++ -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - - # Archives containing C++ object files must be created using - # "CC -ar", where "CC" is the IRIX C++ compiler. This is -@@ -6911,22 +6375,22 @@ - _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' - ;; - *) -- if test yes = "$GXX"; then -- if test no = "$with_gnu_ld"; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GXX" = yes; then -+ if test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' - fi - fi - _LT_TAGVAR(link_all_deplibs, $1)=yes - ;; - esac -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_TAGVAR(inherit_rpath, $1)=yes - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - KCC*) - # Kuck and Associates, Inc. 
(KAI) C++ Compiler -@@ -6934,8 +6398,8 @@ - # KCC will only create a shared library if the output file - # ends with ".so" (or ".sl" for HP-UX), so rename the library - # to its proper name (with version) after linking. -- _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib $wl-retain-symbols-file,$export_symbols; mv \$templib $lib' -+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. -@@ -6944,10 +6408,10 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - - # Archives containing C++ object files must be created using - # "CC -Bstatic", where "CC" is the KAI C++ compiler. -@@ -6961,59 +6425,59 @@ - # earlier do not add the objects themselves. 
- case `$CC -V 2>&1` in - *"Version 7."*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - ;; - *) # Version 8.0 or newer - tmp_idyn= - case $host_cpu in - ia64*) tmp_idyn=' -i_dynamic';; - esac -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - ;; - esac - _LT_TAGVAR(archive_cmds_need_lc, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - ;; - pgCC* | pgcpp*) - # Portland Group C++ compiler - case `$CC -V` in - *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*) - _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ -- compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ -+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' - _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ -- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ -- $RANLIB $oldlib' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ -+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ -+ $RANLIB $oldlib' - _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects 
$compiler_flags ${wl}-soname ${wl}$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ -- rm -rf $tpldir~ -- $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ rm -rf $tpldir~ -+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' - ;; - *) # Version 6 and above use weak symbols -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' - ;; - esac - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl--rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - ;; - cxx*) - # Compaq C++ -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib $wl-retain-symbols-file $wl$export_symbols' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' - - runpath_var=LD_RUN_PATH - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' -@@ -7027,18 +6491,18 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. 
-- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' -+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' - ;; - xl* | mpixl* | bgxl*) - # IBM XL 8.0 on PPC, with GNU ld -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' -- _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ if test "x$supports_anon_versioning" = xyes; then - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - ;; - *) -@@ -7046,10 +6510,10 @@ - *Sun\ C*) - # Sun C++ 5.9 - _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' -- _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file $wl$export_symbols' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - _LT_TAGVAR(compiler_needs_object, $1)=yes - - # Not sure whether something based on -@@ -7107,17 +6571,22 @@ - _LT_TAGVAR(ld_shlibs, $1)=yes - ;; - -- openbsd* | bitrig*) -+ openbsd2*) -+ # C++ shared libraries are fairly broken -+ 
_LT_TAGVAR(ld_shlibs, $1)=no -+ ;; -+ -+ openbsd*) - if test -f /usr/libexec/ld.so; then - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_TAGVAR(hardcode_direct_absolute, $1)=yes - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -- if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file,$export_symbols -o $lib' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' -- _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' -+ if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - fi - output_verbose_link_cmd=func_echo_all - else -@@ -7133,9 +6602,9 @@ - # KCC will only create a shared library if the output file - # ends with ".so" (or ".sl" for HP-UX), so rename the library - # to its proper name (with version) after linking. -- _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' -+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - # Archives containing C++ object files must be created using -@@ -7153,17 +6622,17 @@ - cxx*) - case $host in - osf3*) -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - ;; - *) - _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test 
-n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ -- echo "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input $wl$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~ -- $RM $lib.exp' -+ echo "-hidden">> $lib.exp~ -+ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~ -+ $RM $lib.exp' - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' - ;; - esac -@@ -7178,21 +6647,21 @@ - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. -- output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' -+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' - ;; - *) -- if test yes,no = "$GXX,$with_gnu_ld"; then -- _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' -+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - case $host in - osf3*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version 
${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - esac - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=: - - # Commands to make compiler produce verbose output that lists -@@ -7238,9 +6707,9 @@ - # Sun C++ 4.2, 5.x and Centerline C++ - _LT_TAGVAR(archive_cmds_need_lc,$1)=yes - _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' -- _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -@@ -7248,7 +6717,7 @@ - solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. -+ # but understands `-z linker_flag'. - # Supported since Solaris 2.6 (maybe 2.5.1?) - _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' - ;; -@@ -7265,30 +6734,30 @@ - ;; - gcx*) - # Green Hills C++ Compiler -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - - # The C++ compiler must be used to create the archive. - _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' - ;; - *) - # GNU C++ compiler with Solaris linker -- if test yes,no = "$GXX,$with_gnu_ld"; then -- _LT_TAGVAR(no_undefined_flag, $1)=' $wl-z ${wl}defs' -+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then -+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' - if $CC --version | $GREP -v '^2\.7' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. 
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' - else -- # g++ 2.7 appears to require '-G' NOT '-shared' on this -+ # g++ 2.7 appears to require `-G' NOT `-shared' on this - # platform. -- _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when -@@ -7296,11 +6765,11 @@ - output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' - fi - -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $wl$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir' - case $host_os in - solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) -- _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - ;; - esac - fi -@@ -7309,52 +6778,52 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) -- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no - runpath_var='LD_RUN_PATH' - - case $cc_basename in - CC*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' -- _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' -+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' -+ _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' - _LT_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' - _LT_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_TAGVAR(link_all_deplibs, $1)=yes -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - case $cc_basename in - CC*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~ -- '"$_LT_TAGVAR(old_archive_cmds, $1)" -+ '"$_LT_TAGVAR(old_archive_cmds, $1)" - _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~ -- '"$_LT_TAGVAR(reload_cmds, $1)" -+ '"$_LT_TAGVAR(reload_cmds, $1)" - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - ;; -@@ -7385,10 +6854,10 @@ - esac - - AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) -- test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no -+ test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no - -- _LT_TAGVAR(GCC, $1)=$GXX -- _LT_TAGVAR(LD, $1)=$LD -+ _LT_TAGVAR(GCC, $1)="$GXX" -+ _LT_TAGVAR(LD, $1)="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -7415,7 +6884,7 @@ - lt_cv_path_LD=$lt_save_path_LD - lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld - lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld --fi # test yes != "$_lt_caught_CXX_error" -+fi # test "$_lt_caught_CXX_error" != yes - - AC_LANG_POP - ])# _LT_LANG_CXX_CONFIG -@@ -7437,14 +6906,13 @@ - AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH]) - func_stripname_cnf () - { -- case @S|@2 in -- .*) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%\\\\@S|@2\$%%"`;; -- *) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%@S|@2\$%%"`;; -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; - esac - } # func_stripname_cnf - ])# _LT_FUNC_STRIPNAME_CNF - -- - # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) - # --------------------------------- - # Figure out "hidden" library dependencies from verbose -@@ -7528,13 +6996,13 @@ - pre_test_object_deps_done=no - - for p in `eval "$output_verbose_link_cmd"`; do -- case $prev$p in -+ case ${prev}${p} in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. - # Remove the space. 
-- if test x-L = "$p" || -- test x-R = "$p"; then -+ if test $p = "-L" || -+ test $p = "-R"; then - prev=$p - continue - fi -@@ -7550,16 +7018,16 @@ - case $p in - =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; - esac -- if test no = "$pre_test_object_deps_done"; then -- case $prev in -+ if test "$pre_test_object_deps_done" = no; then -+ case ${prev} in - -L | -R) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. - if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then -- _LT_TAGVAR(compiler_lib_search_path, $1)=$prev$p -+ _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}" - else -- _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} $prev$p" -+ _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}" - fi - ;; - # The "-l" case would never come before the object being -@@ -7567,9 +7035,9 @@ - esac - else - if test -z "$_LT_TAGVAR(postdeps, $1)"; then -- _LT_TAGVAR(postdeps, $1)=$prev$p -+ _LT_TAGVAR(postdeps, $1)="${prev}${p}" - else -- _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} $prev$p" -+ _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" - fi - fi - prev= -@@ -7584,15 +7052,15 @@ - continue - fi - -- if test no = "$pre_test_object_deps_done"; then -+ if test "$pre_test_object_deps_done" = no; then - if test -z "$_LT_TAGVAR(predep_objects, $1)"; then -- _LT_TAGVAR(predep_objects, $1)=$p -+ _LT_TAGVAR(predep_objects, $1)="$p" - else - _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p" - fi - else - if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then -- _LT_TAGVAR(postdep_objects, $1)=$p -+ _LT_TAGVAR(postdep_objects, $1)="$p" - else - _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p" - fi -@@ -7623,6 +7091,51 @@ - _LT_TAGVAR(postdep_objects,$1)= - _LT_TAGVAR(postdeps,$1)= - ;; -+ -+linux*) -+ case `$CC -V 2>&1 | sed 5q` in -+ *Sun\ C*) -+ # Sun C++ 5.9 -+ -+ # The more standards-conforming stlport4 library is -+ # incompatible with the Cstd library. Avoid specifying -+ # it if it's in CXXFLAGS. Ignore libCrun as -+ # -library=stlport4 depends on it. -+ case " $CXX $CXXFLAGS " in -+ *" -library=stlport4 "*) -+ solaris_use_stlport4=yes -+ ;; -+ esac -+ -+ if test "$solaris_use_stlport4" != yes; then -+ _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' -+ fi -+ ;; -+ esac -+ ;; -+ -+solaris*) -+ case $cc_basename in -+ CC* | sunCC*) -+ # The more standards-conforming stlport4 library is -+ # incompatible with the Cstd library. Avoid specifying -+ # it if it's in CXXFLAGS. Ignore libCrun as -+ # -library=stlport4 depends on it. -+ case " $CXX $CXXFLAGS " in -+ *" -library=stlport4 "*) -+ solaris_use_stlport4=yes -+ ;; -+ esac -+ -+ # Adding this requires a known-good setup of shared libraries for -+ # Sun compiler versions before 5.6, else PIC objects from an old -+ # archive will be linked into the output, leading to subtle bugs. -+ if test "$solaris_use_stlport4" != yes; then -+ _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' -+ fi -+ ;; -+ esac -+ ;; - esac - ]) - -@@ -7631,7 +7144,7 @@ - esac - _LT_TAGVAR(compiler_lib_search_dirs, $1)= - if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then -- _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | $SED -e 's! -L! 
!g' -e 's!^ !!'` -+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` - fi - _LT_TAGDECL([], [compiler_lib_search_dirs], [1], - [The directories searched by this compiler when creating a shared library]) -@@ -7651,10 +7164,10 @@ - # -------------------------- - # Ensure that the configuration variables for a Fortran 77 compiler are - # suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. - m4_defun([_LT_LANG_F77_CONFIG], - [AC_LANG_PUSH(Fortran 77) --if test -z "$F77" || test no = "$F77"; then -+if test -z "$F77" || test "X$F77" = "Xno"; then - _lt_disable_F77=yes - fi - -@@ -7691,7 +7204,7 @@ - # the F77 compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_disable_F77"; then -+if test "$_lt_disable_F77" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t -@@ -7713,7 +7226,7 @@ - _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. -- lt_save_CC=$CC -+ lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${F77-"f77"} -@@ -7727,25 +7240,21 @@ - AC_MSG_RESULT([$can_build_shared]) - - AC_MSG_CHECKING([whether to build shared libraries]) -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[[4-9]]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -7753,11 +7262,11 @@ - - AC_MSG_CHECKING([whether to build static libraries]) - # Make sure either enable_shared or enable_static is yes. -- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - AC_MSG_RESULT([$enable_static]) - -- _LT_TAGVAR(GCC, $1)=$G77 -- _LT_TAGVAR(LD, $1)=$LD -+ _LT_TAGVAR(GCC, $1)="$G77" -+ _LT_TAGVAR(LD, $1)="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -7774,9 +7283,9 @@ - fi # test -n "$compiler" - - GCC=$lt_save_GCC -- CC=$lt_save_CC -- CFLAGS=$lt_save_CFLAGS --fi # test yes != "$_lt_disable_F77" -+ CC="$lt_save_CC" -+ CFLAGS="$lt_save_CFLAGS" -+fi # test "$_lt_disable_F77" != yes - - AC_LANG_POP - ])# _LT_LANG_F77_CONFIG -@@ -7786,11 +7295,11 @@ - # ------------------------- - # Ensure that the configuration variables for a Fortran compiler are - # suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. 
- m4_defun([_LT_LANG_FC_CONFIG], - [AC_LANG_PUSH(Fortran) - --if test -z "$FC" || test no = "$FC"; then -+if test -z "$FC" || test "X$FC" = "Xno"; then - _lt_disable_FC=yes - fi - -@@ -7827,7 +7336,7 @@ - # the FC compiler isn't working. Some variables (like enable_shared) - # are currently assumed to apply to all compilers on this platform, - # and will be corrupted by setting them based on a non-working compiler. --if test yes != "$_lt_disable_FC"; then -+if test "$_lt_disable_FC" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t -@@ -7849,7 +7358,7 @@ - _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. -- lt_save_CC=$CC -+ lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${FC-"f95"} -@@ -7865,25 +7374,21 @@ - AC_MSG_RESULT([$can_build_shared]) - - AC_MSG_CHECKING([whether to build shared libraries]) -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[[4-9]]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -7891,11 +7396,11 @@ - - AC_MSG_CHECKING([whether to build static libraries]) - # Make sure either enable_shared or enable_static is yes. -- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - AC_MSG_RESULT([$enable_static]) - -- _LT_TAGVAR(GCC, $1)=$ac_cv_fc_compiler_gnu -- _LT_TAGVAR(LD, $1)=$LD -+ _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu" -+ _LT_TAGVAR(LD, $1)="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change -@@ -7915,7 +7420,7 @@ - GCC=$lt_save_GCC - CC=$lt_save_CC - CFLAGS=$lt_save_CFLAGS --fi # test yes != "$_lt_disable_FC" -+fi # test "$_lt_disable_FC" != yes - - AC_LANG_POP - ])# _LT_LANG_FC_CONFIG -@@ -7925,7 +7430,7 @@ - # -------------------------- - # Ensure that the configuration variables for the GNU Java Compiler compiler - # are suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. - m4_defun([_LT_LANG_GCJ_CONFIG], - [AC_REQUIRE([LT_PROG_GCJ])dnl - AC_LANG_SAVE -@@ -7959,7 +7464,7 @@ - CFLAGS=$GCJFLAGS - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC --_LT_TAGVAR(LD, $1)=$LD -+_LT_TAGVAR(LD, $1)="$LD" - _LT_CC_BASENAME([$compiler]) - - # GCJ did not exist at the time GCC didn't implicitly link libc in. -@@ -7996,7 +7501,7 @@ - # -------------------------- - # Ensure that the configuration variables for the GNU Go compiler - # are suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. 
- m4_defun([_LT_LANG_GO_CONFIG], - [AC_REQUIRE([LT_PROG_GO])dnl - AC_LANG_SAVE -@@ -8030,7 +7535,7 @@ - CFLAGS=$GOFLAGS - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC --_LT_TAGVAR(LD, $1)=$LD -+_LT_TAGVAR(LD, $1)="$LD" - _LT_CC_BASENAME([$compiler]) - - # Go did not exist at the time GCC didn't implicitly link libc in. -@@ -8067,7 +7572,7 @@ - # ------------------------- - # Ensure that the configuration variables for the Windows resource compiler - # are suitably defined. These variables are subsequently used by _LT_CONFIG --# to write the compiler configuration to 'libtool'. -+# to write the compiler configuration to `libtool'. - m4_defun([_LT_LANG_RC_CONFIG], - [AC_REQUIRE([LT_PROG_RC])dnl - AC_LANG_SAVE -@@ -8083,7 +7588,7 @@ - lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }' - - # Code to be used in simple link tests --lt_simple_link_test_code=$lt_simple_compile_test_code -+lt_simple_link_test_code="$lt_simple_compile_test_code" - - # ltmain only uses $CC for tagged configurations so make sure $CC is set. - _LT_TAG_COMPILER -@@ -8093,7 +7598,7 @@ - _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. --lt_save_CC=$CC -+lt_save_CC="$CC" - lt_save_CFLAGS=$CFLAGS - lt_save_GCC=$GCC - GCC= -@@ -8122,7 +7627,7 @@ - [m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ], - [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ], - [AC_CHECK_TOOL(GCJ, gcj,) -- test set = "${GCJFLAGS+set}" || GCJFLAGS="-g -O2" -+ test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2" - AC_SUBST(GCJFLAGS)])])[]dnl - ]) - -@@ -8233,7 +7738,7 @@ - # Add /usr/xpg4/bin/sed as it is typically found on Solaris - # along with /bin/sed that truncates output. - for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do -- test ! -f "$lt_ac_sed" && continue -+ test ! -f $lt_ac_sed && continue - cat /dev/null > conftest.in - lt_ac_count=0 - echo $ECHO_N "0123456789$ECHO_C" >conftest.in -@@ -8250,9 +7755,9 @@ - $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break - cmp -s conftest.out conftest.nl || break - # 10000 chars as input seems more than enough -- test 10 -lt "$lt_ac_count" && break -+ test $lt_ac_count -gt 10 && break - lt_ac_count=`expr $lt_ac_count + 1` -- if test "$lt_ac_count" -gt "$lt_ac_max"; then -+ if test $lt_ac_count -gt $lt_ac_max; then - lt_ac_max=$lt_ac_count - lt_cv_path_SED=$lt_ac_sed - fi -@@ -8276,7 +7781,27 @@ - # Find out whether the shell is Bourne or XSI compatible, - # or has some other useful features. 
- m4_defun([_LT_CHECK_SHELL_FEATURES], --[if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then -+[AC_MSG_CHECKING([whether the shell understands some XSI constructs]) -+# Try some XSI features -+xsi_shell=no -+( _lt_dummy="a/b/c" -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ -+ && eval 'test $(( 1 + 1 )) -eq 2 \ -+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ -+ && xsi_shell=yes -+AC_MSG_RESULT([$xsi_shell]) -+_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell']) -+ -+AC_MSG_CHECKING([whether the shell understands "+="]) -+lt_shell_append=no -+( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \ -+ >/dev/null 2>&1 \ -+ && lt_shell_append=yes -+AC_MSG_RESULT([$lt_shell_append]) -+_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append']) -+ -+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - lt_unset=unset - else - lt_unset=false -@@ -8300,9 +7825,102 @@ - ])# _LT_CHECK_SHELL_FEATURES - - -+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY) -+# ------------------------------------------------------ -+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and -+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY. -+m4_defun([_LT_PROG_FUNCTION_REPLACE], -+[dnl { -+sed -e '/^$1 ()$/,/^} # $1 /c\ -+$1 ()\ -+{\ -+m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1]) -+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+]) -+ -+ -+# _LT_PROG_REPLACE_SHELLFNS -+# ------------------------- -+# Replace existing portable implementations of several shell functions with -+# equivalent extended shell implementations where those features are available.. -+m4_defun([_LT_PROG_REPLACE_SHELLFNS], -+[if test x"$xsi_shell" = xyes; then -+ _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl -+ case ${1} in -+ */*) func_dirname_result="${1%/*}${2}" ;; -+ * ) func_dirname_result="${3}" ;; -+ esac]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl -+ func_basename_result="${1##*/}"]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl -+ case ${1} in -+ */*) func_dirname_result="${1%/*}${2}" ;; -+ * ) func_dirname_result="${3}" ;; -+ esac -+ func_basename_result="${1##*/}"]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl -+ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -+ # positional parameters, so assign one to ordinary parameter first. 
-+ func_stripname_result=${3} -+ func_stripname_result=${func_stripname_result#"${1}"} -+ func_stripname_result=${func_stripname_result%"${2}"}]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl -+ func_split_long_opt_name=${1%%=*} -+ func_split_long_opt_arg=${1#*=}]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl -+ func_split_short_opt_arg=${1#??} -+ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl -+ case ${1} in -+ *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -+ *) func_lo2o_result=${1} ;; -+ esac]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}]) -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl -+ func_quote_for_eval "${2}" -+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \ -+ eval "${1}+=\\\\ \\$func_quote_for_eval_result"]) -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ AC_MSG_WARN([Unable to substitute extended shell functions in $ofile]) -+fi -+]) -+ - # _LT_PATH_CONVERSION_FUNCTIONS - # ----------------------------- --# Determine what file name conversion functions should be used by -+# Determine which file name conversion functions should be used by - # func_to_host_file (and, implicitly, by func_to_host_path). These are needed - # for certain cross-compile configurations and native mingw. - m4_defun([_LT_PATH_CONVERSION_FUNCTIONS], -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltmain.sh psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltmain.sh ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltmain.sh 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltmain.sh 2020-07-16 10:48:35.464542572 +0200 -@@ -1,12 +1,9 @@ --#! /bin/sh --## DO NOT EDIT - This file generated from ./build-aux/ltmain.in --## by inline-source v2014-01-03.01 - --# libtool (GNU libtool) 2.4.6 --# Provide generalized library-building support services. -+# libtool (GNU libtool) 2.4.2 - # Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996 - --# Copyright (C) 1996-2015 Free Software Foundation, Inc. -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, -+# 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc. - # This is free software; see the source for copying conditions. There is NO - # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - -@@ -26,2112 +23,881 @@ - # General Public License for more details. 
- # - # You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -- -- --PROGRAM=libtool --PACKAGE=libtool --VERSION=2.4.6 --package_revision=2.4.6 -- -- --## ------ ## --## Usage. ## --## ------ ## -- --# Run './libtool --help' for help with using this script from the --# command line. -- -- --## ------------------------------- ## --## User overridable command paths. ## --## ------------------------------- ## -- --# After configure completes, it has a better idea of some of the --# shell tools we need than the defaults used by the functions shared --# with bootstrap, so set those here where they can still be over- --# ridden by the user, but otherwise take precedence. -- --: ${AUTOCONF="autoconf"} --: ${AUTOMAKE="automake"} -- -- --## -------------------------- ## --## Source external libraries. ## --## -------------------------- ## -- --# Much of our low-level functionality needs to be sourced from external --# libraries, which are installed to $pkgauxdir. -- --# Set a version string for this script. --scriptversion=2015-01-20.17; # UTC -- --# General shell script boiler plate, and helper functions. --# Written by Gary V. Vaughan, 2004 -+# along with GNU Libtool; see the file COPYING. If not, a copy -+# can be downloaded from http://www.gnu.org/licenses/gpl.html, -+# or obtained by writing to the Free Software Foundation, Inc., -+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - --# Copyright (C) 2004-2015 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# This program is free software; you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation; either version 3 of the License, or --# (at your option) any later version. -- --# As a special exception to the GNU General Public License, if you distribute --# this file as part of a program or library that is built using GNU Libtool, --# you may include this file under the same distribution terms that you use --# for the rest of that program. -- --# This program is distributed in the hope that it will be useful, --# but WITHOUT ANY WARRANTY; without even the implied warranty of --# MERCHANTABILITY or FITNES FOR A PARTICULAR PURPOSE. See the GNU --# General Public License for more details. -- --# You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -- --# Please report bugs or propose patches to gary@gnu.org. -- -- --## ------ ## --## Usage. ## --## ------ ## -- --# Evaluate this file near the top of your script to gain access to --# the functions and variables defined here: -+# Usage: $progname [OPTION]... [MODE-ARG]... - # --# . `echo "$0" | ${SED-sed} 's|[^/]*$||'`/build-aux/funclib.sh -+# Provide generalized library-building support services. - # --# If you need to override any of the default environment variable --# settings, do that before evaluating this file. -- -- --## -------------------- ## --## Shell normalisation. 
## --## -------------------- ## -+# --config show all configuration variables -+# --debug enable verbose shell tracing -+# -n, --dry-run display commands without modifying any files -+# --features display basic configuration information and exit -+# --mode=MODE use operation mode MODE -+# --preserve-dup-deps don't remove duplicate dependency libraries -+# --quiet, --silent don't print informational messages -+# --no-quiet, --no-silent -+# print informational messages (default) -+# --no-warn don't display warning messages -+# --tag=TAG use configuration variables from tag TAG -+# -v, --verbose print more informational messages than default -+# --no-verbose don't print the extra informational messages -+# --version print version information -+# -h, --help, --help-all print short, long, or detailed help message -+# -+# MODE must be one of the following: -+# -+# clean remove files from the build directory -+# compile compile a source file into a libtool object -+# execute automatically set library path, then run a program -+# finish complete the installation of libtool libraries -+# install install libraries or executables -+# link create a library or an executable -+# uninstall remove libraries from an installed directory -+# -+# MODE-ARGS vary depending on the MODE. When passed as first option, -+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that. -+# Try `$progname --help --mode=MODE' for a more detailed description of MODE. -+# -+# When reporting a bug, please describe a test case to reproduce it and -+# include the following information: -+# -+# host-triplet: $host -+# shell: $SHELL -+# compiler: $LTCC -+# compiler flags: $LTCFLAGS -+# linker: $LD (gnu? $with_gnu_ld) -+# $progname: (GNU libtool) 2.4.2 -+# automake: $automake_version -+# autoconf: $autoconf_version -+# -+# Report bugs to <bug-libtool@gnu.org>. -+# GNU libtool home page: <http://www.gnu.org/software/libtool/>. -+# General help using GNU software: <http://www.gnu.org/gethelp/>. - --# Some shells need a little help to be as Bourne compatible as possible. --# Before doing anything else, make sure all that help has been provided! -+PROGRAM=libtool -+PACKAGE=libtool -+VERSION=2.4.2 -+TIMESTAMP="" -+package_revision=1.3337 - --DUALCASE=1; export DUALCASE # for MKS sh --if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : -+# Be Bourne compatible -+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: -- # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which -+ # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST - else -- case `(set -o) 2>/dev/null` in *posix*) set -o posix ;; esac --fi -- --# NLS nuisances: We save the old values in case they are required later. --_G_user_locale= --_G_safe_locale= --for _G_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES --do -- eval "if test set = \"\${$_G_var+set}\"; then -- save_$_G_var=\$$_G_var -- $_G_var=C -- export $_G_var -- _G_user_locale=\"$_G_var=\\\$save_\$_G_var; \$_G_user_locale\" -- _G_safe_locale=\"$_G_var=C; \$_G_safe_locale\" -- fi" --done -- --# CDPATH. --(unset CDPATH) >/dev/null 2>&1 && unset CDPATH -- --# Make sure IFS has a sensible default --sp=' ' --nl=' --' --IFS="$sp $nl" -- --# There are apparently some retarded systems that use ';' as a PATH separator! 
--if test "${PATH_SEPARATOR+set}" != set; then -- PATH_SEPARATOR=: -- (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { -- (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || -- PATH_SEPARATOR=';' -- } -+ case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac - fi -+BIN_SH=xpg4; export BIN_SH # for Tru64 -+DUALCASE=1; export DUALCASE # for MKS sh - -- -- --## ------------------------- ## --## Locate command utilities. ## --## ------------------------- ## -- -- --# func_executable_p FILE --# ---------------------- --# Check that FILE is an executable regular file. --func_executable_p () -+# A function that is used when there is no print builtin or printf. -+func_fallback_echo () - { -- test -f "$1" && test -x "$1" --} -- -- --# func_path_progs PROGS_LIST CHECK_FUNC [PATH] --# -------------------------------------------- --# Search for either a program that responds to --version with output --# containing "GNU", or else returned by CHECK_FUNC otherwise, by --# trying all the directories in PATH with each of the elements of --# PROGS_LIST. --# --# CHECK_FUNC should accept the path to a candidate program, and --# set $func_check_prog_result if it truncates its output less than --# $_G_path_prog_max characters. --func_path_progs () --{ -- _G_progs_list=$1 -- _G_check_func=$2 -- _G_PATH=${3-"$PATH"} -- -- _G_path_prog_max=0 -- _G_path_prog_found=false -- _G_save_IFS=$IFS; IFS=${PATH_SEPARATOR-:} -- for _G_dir in $_G_PATH; do -- IFS=$_G_save_IFS -- test -z "$_G_dir" && _G_dir=. -- for _G_prog_name in $_G_progs_list; do -- for _exeext in '' .EXE; do -- _G_path_prog=$_G_dir/$_G_prog_name$_exeext -- func_executable_p "$_G_path_prog" || continue -- case `"$_G_path_prog" --version 2>&1` in -- *GNU*) func_path_progs_result=$_G_path_prog _G_path_prog_found=: ;; -- *) $_G_check_func $_G_path_prog -- func_path_progs_result=$func_check_prog_result -- ;; -- esac -- $_G_path_prog_found && break 3 -- done -- done -- done -- IFS=$_G_save_IFS -- test -z "$func_path_progs_result" && { -- echo "no acceptable sed could be found in \$PATH" >&2 -- exit 1 -- } --} -- -- --# We want to be able to use the functions in this file before configure --# has figured out where the best binaries are kept, which means we have --# to search for them ourselves - except when the results are already set --# where we skip the searches. -- --# Unless the user overrides by setting SED, search the path for either GNU --# sed, or the sed that truncates its output the least. 
--test -z "$SED" && { -- _G_sed_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ -- for _G_i in 1 2 3 4 5 6 7; do -- _G_sed_script=$_G_sed_script$nl$_G_sed_script -- done -- echo "$_G_sed_script" 2>/dev/null | sed 99q >conftest.sed -- _G_sed_script= -- -- func_check_prog_sed () -- { -- _G_path_prog=$1 -- -- _G_count=0 -- printf 0123456789 >conftest.in -- while : -- do -- cat conftest.in conftest.in >conftest.tmp -- mv conftest.tmp conftest.in -- cp conftest.in conftest.nl -- echo '' >> conftest.nl -- "$_G_path_prog" -f conftest.sed <conftest.nl >conftest.out 2>/dev/null || break -- diff conftest.out conftest.nl >/dev/null 2>&1 || break -- _G_count=`expr $_G_count + 1` -- if test "$_G_count" -gt "$_G_path_prog_max"; then -- # Best one so far, save it but keep looking for a better one -- func_check_prog_result=$_G_path_prog -- _G_path_prog_max=$_G_count -- fi -- # 10*(2^10) chars as input seems more than enough -- test 10 -lt "$_G_count" && break -- done -- rm -f conftest.in conftest.tmp conftest.nl conftest.out -- } -- -- func_path_progs "sed gsed" func_check_prog_sed $PATH:/usr/xpg4/bin -- rm -f conftest.sed -- SED=$func_path_progs_result -+ eval 'cat <<_LTECHO_EOF -+$1 -+_LTECHO_EOF' - } - -+# NLS nuisances: We save the old values to restore during execute mode. -+lt_user_locale= -+lt_safe_locale= -+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES -+do -+ eval "if test \"\${$lt_var+set}\" = set; then -+ save_$lt_var=\$$lt_var -+ $lt_var=C -+ export $lt_var -+ lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\" -+ lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" -+ fi" -+done -+LC_ALL=C -+LANGUAGE=C -+export LANGUAGE LC_ALL - --# Unless the user overrides by setting GREP, search the path for either GNU --# grep, or the grep that truncates its output the least. --test -z "$GREP" && { -- func_check_prog_grep () -- { -- _G_path_prog=$1 -- -- _G_count=0 -- _G_path_prog_max=0 -- printf 0123456789 >conftest.in -- while : -- do -- cat conftest.in conftest.in >conftest.tmp -- mv conftest.tmp conftest.in -- cp conftest.in conftest.nl -- echo 'GREP' >> conftest.nl -- "$_G_path_prog" -e 'GREP$' -e '-(cannot match)-' <conftest.nl >conftest.out 2>/dev/null || break -- diff conftest.out conftest.nl >/dev/null 2>&1 || break -- _G_count=`expr $_G_count + 1` -- if test "$_G_count" -gt "$_G_path_prog_max"; then -- # Best one so far, save it but keep looking for a better one -- func_check_prog_result=$_G_path_prog -- _G_path_prog_max=$_G_count -- fi -- # 10*(2^10) chars as input seems more than enough -- test 10 -lt "$_G_count" && break -- done -- rm -f conftest.in conftest.tmp conftest.nl conftest.out -- } -+$lt_unset CDPATH - -- func_path_progs "grep ggrep" func_check_prog_grep $PATH:/usr/xpg4/bin -- GREP=$func_path_progs_result --} - -+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh -+# is ksh but when the shell is invoked as "sh" and the current value of -+# the _XPG environment variable is not equal to 1 (one), the special -+# positional parameter $0, within a function call, is the name of the -+# function. -+progpath="$0" - --## ------------------------------- ## --## User overridable command paths. ## --## ------------------------------- ## - --# All uppercase variable names are used for environment variables. These --# variables can be overridden by the user before calling a script that --# uses them if a suitable command of that name is not already available --# in the command search PATH. 
- - : ${CP="cp -f"} --: ${ECHO="printf %s\n"} --: ${EGREP="$GREP -E"} --: ${FGREP="$GREP -F"} --: ${LN_S="ln -s"} -+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'} - : ${MAKE="make"} - : ${MKDIR="mkdir"} - : ${MV="mv -f"} - : ${RM="rm -f"} - : ${SHELL="${CONFIG_SHELL-/bin/sh}"} -+: ${Xsed="$SED -e 1s/^X//"} - -+# Global variables: -+EXIT_SUCCESS=0 -+EXIT_FAILURE=1 -+EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. -+EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake. - --## -------------------- ## --## Useful sed snippets. ## --## -------------------- ## -+exit_status=$EXIT_SUCCESS - --sed_dirname='s|/[^/]*$||' --sed_basename='s|^.*/||' -+# Make sure IFS has a sensible default -+lt_nl=' -+' -+IFS=" $lt_nl" - --# Sed substitution that helps us do robust quoting. It backslashifies --# metacharacters that are still active within double-quoted strings. --sed_quote_subst='s|\([`"$\\]\)|\\\1|g' -+dirname="s,/[^/]*$,," -+basename="s,^.*/,," - --# Same as above, but do not quote variable references. --sed_double_quote_subst='s/\(["`\\]\)/\\\1/g' -+# func_dirname file append nondir_replacement -+# Compute the dirname of FILE. If nonempty, add APPEND to the result, -+# otherwise set result to NONDIR_REPLACEMENT. -+func_dirname () -+{ -+ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -+ if test "X$func_dirname_result" = "X${1}"; then -+ func_dirname_result="${3}" -+ else -+ func_dirname_result="$func_dirname_result${2}" -+ fi -+} # func_dirname may be replaced by extended shell implementation - --# Sed substitution that turns a string into a regex matching for the --# string literally. --sed_make_literal_regex='s|[].[^$\\*\/]|\\&|g' - --# Sed substitution that converts a w32 file name or path --# that contains forward slashes, into one that contains --# (escaped) backslashes. A very naive implementation. --sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' -+# func_basename file -+func_basename () -+{ -+ func_basename_result=`$ECHO "${1}" | $SED "$basename"` -+} # func_basename may be replaced by extended shell implementation - --# Re-'\' parameter expansions in output of sed_double_quote_subst that --# were '\'-ed in input to the same. If an odd number of '\' preceded a --# '$' in input to sed_double_quote_subst, that '$' was protected from --# expansion. Since each input '\' is now two '\'s, look for any number --# of runs of four '\'s followed by two '\'s and then a '$'. '\' that '$'. --_G_bs='\\' --_G_bs2='\\\\' --_G_bs4='\\\\\\\\' --_G_dollar='\$' --sed_double_backslash="\ -- s/$_G_bs4/&\\ --/g -- s/^$_G_bs2$_G_dollar/$_G_bs&/ -- s/\\([^$_G_bs]\\)$_G_bs2$_G_dollar/\\1$_G_bs2$_G_bs$_G_dollar/g -- s/\n//g" -+ -+# func_dirname_and_basename file append nondir_replacement -+# perform func_basename and func_dirname in a single function -+# call: -+# dirname: Compute the dirname of FILE. If nonempty, -+# add APPEND to the result, otherwise set result -+# to NONDIR_REPLACEMENT. -+# value returned in "$func_dirname_result" -+# basename: Compute filename of FILE. -+# value retuned in "$func_basename_result" -+# Implementation must be kept synchronized with func_dirname -+# and func_basename. For efficiency, we do not delegate to -+# those functions but instead duplicate the functionality here. -+func_dirname_and_basename () -+{ -+ # Extract subdirectory from the argument. 
-+ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` -+ if test "X$func_dirname_result" = "X${1}"; then -+ func_dirname_result="${3}" -+ else -+ func_dirname_result="$func_dirname_result${2}" -+ fi -+ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` -+} # func_dirname_and_basename may be replaced by extended shell implementation - - --## ----------------- ## --## Global variables. ## --## ----------------- ## -- --# Except for the global variables explicitly listed below, the following --# functions in the '^func_' namespace, and the '^require_' namespace --# variables initialised in the 'Resource management' section, sourcing --# this file will not pollute your global namespace with anything --# else. There's no portable way to scope variables in Bourne shell --# though, so actually running these functions will sometimes place --# results into a variable named after the function, and often use --# temporary variables in the '^_G_' namespace. If you are careful to --# avoid using those namespaces casually in your sourcing script, things --# should continue to work as you expect. And, of course, you can freely --# overwrite any of the functions or variables defined here before --# calling anything to customize them. -+# func_stripname prefix suffix name -+# strip PREFIX and SUFFIX off of NAME. -+# PREFIX and SUFFIX must not contain globbing or regex special -+# characters, hashes, percent signs, but SUFFIX may contain a leading -+# dot (in which case that matches only a dot). -+# func_strip_suffix prefix name -+func_stripname () -+{ -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -+ esac -+} # func_stripname may be replaced by extended shell implementation - --EXIT_SUCCESS=0 --EXIT_FAILURE=1 --EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. --EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake. - --# Allow overriding, eg assuming that you follow the convention of --# putting '$debug_cmd' at the start of all your functions, you can get --# bash to show function call trace with: --# --# debug_cmd='eval echo "${FUNCNAME[0]} $*" >&2' bash your-script-name --debug_cmd=${debug_cmd-":"} --exit_cmd=: -+# These SED scripts presuppose an absolute path with a trailing slash. -+pathcar='s,^/\([^/]*\).*$,\1,' -+pathcdr='s,^/[^/]*,,' -+removedotparts=':dotsl -+ s@/\./@/@g -+ t dotsl -+ s,/\.$,/,' -+collapseslashes='s@/\{1,\}@/@g' -+finalslash='s,/*$,/,' - --# By convention, finish your script with: --# --# exit $exit_status --# --# so that you can set exit_status to non-zero if you want to indicate --# something went wrong during execution without actually bailing out at --# the point of failure. --exit_status=$EXIT_SUCCESS -+# func_normal_abspath PATH -+# Remove doubled-up and trailing slashes, "." path components, -+# and cancel out any ".." path components in PATH after making -+# it an absolute path. -+# value returned in "$func_normal_abspath_result" -+func_normal_abspath () -+{ -+ # Start from root dir and reassemble the path. -+ func_normal_abspath_result= -+ func_normal_abspath_tpath=$1 -+ func_normal_abspath_altnamespace= -+ case $func_normal_abspath_tpath in -+ "") -+ # Empty path, that just means $cwd. 
-+ func_stripname '' '/' "`pwd`" -+ func_normal_abspath_result=$func_stripname_result -+ return -+ ;; -+ # The next three entries are used to spot a run of precisely -+ # two leading slashes without using negated character classes; -+ # we take advantage of case's first-match behaviour. -+ ///*) -+ # Unusual form of absolute path, do nothing. -+ ;; -+ //*) -+ # Not necessarily an ordinary path; POSIX reserves leading '//' -+ # and for example Cygwin uses it to access remote file shares -+ # over CIFS/SMB, so we conserve a leading double slash if found. -+ func_normal_abspath_altnamespace=/ -+ ;; -+ /*) -+ # Absolute path, do nothing. -+ ;; -+ *) -+ # Relative path, prepend $cwd. -+ func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath -+ ;; -+ esac -+ # Cancel out all the simple stuff to save iterations. We also want -+ # the path to end with a slash for ease of parsing, so make sure -+ # there is one (and only one) here. -+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -+ -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"` -+ while :; do -+ # Processed it all yet? -+ if test "$func_normal_abspath_tpath" = / ; then -+ # If we ascended to the root using ".." the result may be empty now. -+ if test -z "$func_normal_abspath_result" ; then -+ func_normal_abspath_result=/ -+ fi -+ break -+ fi -+ func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \ -+ -e "$pathcar"` -+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -+ -e "$pathcdr"` -+ # Figure out what to do with it -+ case $func_normal_abspath_tcomponent in -+ "") -+ # Trailing empty path component, ignore it. -+ ;; -+ ..) -+ # Parent dir; strip last assembled component from result. -+ func_dirname "$func_normal_abspath_result" -+ func_normal_abspath_result=$func_dirname_result -+ ;; -+ *) -+ # Actual path component, append it. -+ func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent -+ ;; -+ esac -+ done -+ # Restore leading double-slash if one was found on entry. -+ func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result -+} - --# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh --# is ksh but when the shell is invoked as "sh" and the current value of --# the _XPG environment variable is not equal to 1 (one), the special --# positional parameter $0, within a function call, is the name of the --# function. --progpath=$0 -+# func_relative_path SRCDIR DSTDIR -+# generates a relative path from SRCDIR to DSTDIR, with a trailing -+# slash if non-empty, suitable for immediately appending a filename -+# without needing to append a separator. -+# value returned in "$func_relative_path_result" -+func_relative_path () -+{ -+ func_relative_path_result= -+ func_normal_abspath "$1" -+ func_relative_path_tlibdir=$func_normal_abspath_result -+ func_normal_abspath "$2" -+ func_relative_path_tbindir=$func_normal_abspath_result -+ -+ # Ascend the tree starting from libdir -+ while :; do -+ # check if we have found a prefix of bindir -+ case $func_relative_path_tbindir in -+ $func_relative_path_tlibdir) -+ # found an exact match -+ func_relative_path_tcancelled= -+ break -+ ;; -+ $func_relative_path_tlibdir*) -+ # found a matching prefix -+ func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir" -+ func_relative_path_tcancelled=$func_stripname_result -+ if test -z "$func_relative_path_result"; then -+ func_relative_path_result=. 
-+ fi -+ break -+ ;; -+ *) -+ func_dirname $func_relative_path_tlibdir -+ func_relative_path_tlibdir=${func_dirname_result} -+ if test "x$func_relative_path_tlibdir" = x ; then -+ # Have to descend all the way to the root! -+ func_relative_path_result=../$func_relative_path_result -+ func_relative_path_tcancelled=$func_relative_path_tbindir -+ break -+ fi -+ func_relative_path_result=../$func_relative_path_result -+ ;; -+ esac -+ done - --# The name of this program. --progname=`$ECHO "$progpath" |$SED "$sed_basename"` -+ # Now calculate path; take care to avoid doubling-up slashes. -+ func_stripname '' '/' "$func_relative_path_result" -+ func_relative_path_result=$func_stripname_result -+ func_stripname '/' '/' "$func_relative_path_tcancelled" -+ if test "x$func_stripname_result" != x ; then -+ func_relative_path_result=${func_relative_path_result}/${func_stripname_result} -+ fi - --# Make sure we have an absolute progpath for reexecution: -+ # Normalisation. If bindir is libdir, return empty string, -+ # else relative path ending with a slash; either way, target -+ # file name can be directly appended. -+ if test ! -z "$func_relative_path_result"; then -+ func_stripname './' '' "$func_relative_path_result/" -+ func_relative_path_result=$func_stripname_result -+ fi -+} -+ -+# The name of this program: -+func_dirname_and_basename "$progpath" -+progname=$func_basename_result -+ -+# Make sure we have an absolute path for reexecution: - case $progpath in - [\\/]*|[A-Za-z]:\\*) ;; - *[\\/]*) -- progdir=`$ECHO "$progpath" |$SED "$sed_dirname"` -+ progdir=$func_dirname_result - progdir=`cd "$progdir" && pwd` -- progpath=$progdir/$progname -+ progpath="$progdir/$progname" - ;; - *) -- _G_IFS=$IFS -+ save_IFS="$IFS" - IFS=${PATH_SEPARATOR-:} - for progdir in $PATH; do -- IFS=$_G_IFS -+ IFS="$save_IFS" - test -x "$progdir/$progname" && break - done -- IFS=$_G_IFS -+ IFS="$save_IFS" - test -n "$progdir" || progdir=`pwd` -- progpath=$progdir/$progname -+ progpath="$progdir/$progname" - ;; - esac - -+# Sed substitution that helps us do robust quoting. It backslashifies -+# metacharacters that are still active within double-quoted strings. -+Xsed="${SED}"' -e 1s/^X//' -+sed_quote_subst='s/\([`"$\\]\)/\\\1/g' - --## ----------------- ## --## Standard options. ## --## ----------------- ## -- --# The following options affect the operation of the functions defined --# below, and should be set appropriately depending on run-time para- --# meters passed on the command line. -- --opt_dry_run=false --opt_quiet=false --opt_verbose=false -- --# Categories 'all' and 'none' are always available. Append any others --# you will pass as the first argument to func_warning from your own --# code. --warning_categories= -- --# By default, display warnings according to 'opt_warning_types'. Set --# 'warning_func' to ':' to elide all warnings, or func_fatal_error to --# treat the next displayed warning as a fatal error. --warning_func=func_warn_and_continue -- --# Set to 'all' to display all warnings, 'none' to suppress all --# warnings, or a space delimited list of some subset of --# 'warning_categories' to display only the listed warnings. --opt_warning_types=all -- -- --## -------------------- ## --## Resource management. ## --## -------------------- ## -- --# This section contains definitions for functions that each ensure a --# particular resource (a file, or a non-empty configuration variable for --# example) is available, and if appropriate to extract default values --# from pertinent package files. 
Call them using their associated --# 'require_*' variable to ensure that they are executed, at most, once. --# --# It's entirely deliberate that calling these functions can set --# variables that don't obey the namespace limitations obeyed by the rest --# of this file, in order that that they be as useful as possible to --# callers. -- -- --# require_term_colors --# ------------------- --# Allow display of bold text on terminals that support it. --require_term_colors=func_require_term_colors --func_require_term_colors () --{ -- $debug_cmd -- -- test -t 1 && { -- # COLORTERM and USE_ANSI_COLORS environment variables take -- # precedence, because most terminfo databases neglect to describe -- # whether color sequences are supported. -- test -n "${COLORTERM+set}" && : ${USE_ANSI_COLORS="1"} -- -- if test 1 = "$USE_ANSI_COLORS"; then -- # Standard ANSI escape sequences -- tc_reset='[0m' -- tc_bold='[1m'; tc_standout='[7m' -- tc_red='[31m'; tc_green='[32m' -- tc_blue='[34m'; tc_cyan='[36m' -- else -- # Otherwise trust the terminfo database after all. -- test -n "`tput sgr0 2>/dev/null`" && { -- tc_reset=`tput sgr0` -- test -n "`tput bold 2>/dev/null`" && tc_bold=`tput bold` -- tc_standout=$tc_bold -- test -n "`tput smso 2>/dev/null`" && tc_standout=`tput smso` -- test -n "`tput setaf 1 2>/dev/null`" && tc_red=`tput setaf 1` -- test -n "`tput setaf 2 2>/dev/null`" && tc_green=`tput setaf 2` -- test -n "`tput setaf 4 2>/dev/null`" && tc_blue=`tput setaf 4` -- test -n "`tput setaf 5 2>/dev/null`" && tc_cyan=`tput setaf 5` -- } -- fi -- } -- -- require_term_colors=: --} -- -- --## ----------------- ## --## Function library. ## --## ----------------- ## -- --# This section contains a variety of useful functions to call in your --# scripts. Take note of the portable wrappers for features provided by --# some modern shells, which will fall back to slower equivalents on --# less featureful shells. -- -- --# func_append VAR VALUE --# --------------------- --# Append VALUE onto the existing contents of VAR. -- -- # We should try to minimise forks, especially on Windows where they are -- # unreasonably slow, so skip the feature probes when bash or zsh are -- # being used: -- if test set = "${BASH_VERSION+set}${ZSH_VERSION+set}"; then -- : ${_G_HAVE_ARITH_OP="yes"} -- : ${_G_HAVE_XSI_OPS="yes"} -- # The += operator was introduced in bash 3.1 -- case $BASH_VERSION in -- [12].* | 3.0 | 3.0*) ;; -- *) -- : ${_G_HAVE_PLUSEQ_OP="yes"} -- ;; -- esac -- fi -- -- # _G_HAVE_PLUSEQ_OP -- # Can be empty, in which case the shell is probed, "yes" if += is -- # useable or anything else if it does not work. -- test -z "$_G_HAVE_PLUSEQ_OP" \ -- && (eval 'x=a; x+=" b"; test "a b" = "$x"') 2>/dev/null \ -- && _G_HAVE_PLUSEQ_OP=yes -- --if test yes = "$_G_HAVE_PLUSEQ_OP" --then -- # This is an XSI compatible shell, allowing a faster implementation... -- eval 'func_append () -- { -- $debug_cmd -- -- eval "$1+=\$2" -- }' --else -- # ...otherwise fall back to using expr, which is often a shell builtin. -- func_append () -- { -- $debug_cmd -- -- eval "$1=\$$1\$2" -- } --fi -- -- --# func_append_quoted VAR VALUE --# ---------------------------- --# Quote VALUE and append to the end of shell variable VAR, separated --# by a space. 
--if test yes = "$_G_HAVE_PLUSEQ_OP"; then -- eval 'func_append_quoted () -- { -- $debug_cmd -- -- func_quote_for_eval "$2" -- eval "$1+=\\ \$func_quote_for_eval_result" -- }' --else -- func_append_quoted () -- { -- $debug_cmd -- -- func_quote_for_eval "$2" -- eval "$1=\$$1\\ \$func_quote_for_eval_result" -- } --fi -- -- --# func_append_uniq VAR VALUE --# -------------------------- --# Append unique VALUE onto the existing contents of VAR, assuming --# entries are delimited by the first character of VALUE. For example: --# --# func_append_uniq options " --another-option option-argument" --# --# will only append to $options if " --another-option option-argument " --# is not already present somewhere in $options already (note spaces at --# each end implied by leading space in second argument). --func_append_uniq () --{ -- $debug_cmd -- -- eval _G_current_value='`$ECHO $'$1'`' -- _G_delim=`expr "$2" : '\(.\)'` -- -- case $_G_delim$_G_current_value$_G_delim in -- *"$2$_G_delim"*) ;; -- *) func_append "$@" ;; -- esac --} -- -- --# func_arith TERM... --# ------------------ --# Set func_arith_result to the result of evaluating TERMs. -- test -z "$_G_HAVE_ARITH_OP" \ -- && (eval 'test 2 = $(( 1 + 1 ))') 2>/dev/null \ -- && _G_HAVE_ARITH_OP=yes -- --if test yes = "$_G_HAVE_ARITH_OP"; then -- eval 'func_arith () -- { -- $debug_cmd -- -- func_arith_result=$(( $* )) -- }' --else -- func_arith () -- { -- $debug_cmd -- -- func_arith_result=`expr "$@"` -- } --fi -- -- --# func_basename FILE --# ------------------ --# Set func_basename_result to FILE with everything up to and including --# the last / stripped. --if test yes = "$_G_HAVE_XSI_OPS"; then -- # If this shell supports suffix pattern removal, then use it to avoid -- # forking. Hide the definitions single quotes in case the shell chokes -- # on unsupported syntax... -- _b='func_basename_result=${1##*/}' -- _d='case $1 in -- */*) func_dirname_result=${1%/*}$2 ;; -- * ) func_dirname_result=$3 ;; -- esac' -- --else -- # ...otherwise fall back to using sed. -- _b='func_basename_result=`$ECHO "$1" |$SED "$sed_basename"`' -- _d='func_dirname_result=`$ECHO "$1" |$SED "$sed_dirname"` -- if test "X$func_dirname_result" = "X$1"; then -- func_dirname_result=$3 -- else -- func_append func_dirname_result "$2" -- fi' --fi -- --eval 'func_basename () --{ -- $debug_cmd -- -- '"$_b"' --}' -- -- --# func_dirname FILE APPEND NONDIR_REPLACEMENT --# ------------------------------------------- --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --eval 'func_dirname () --{ -- $debug_cmd -- -- '"$_d"' --}' -+# Same as above, but do not quote variable references. -+double_quote_subst='s/\(["`\\]\)/\\\1/g' - -+# Sed substitution that turns a string into a regex matching for the -+# string literally. -+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g' - --# func_dirname_and_basename FILE APPEND NONDIR_REPLACEMENT --# -------------------------------------------------------- --# Perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# For efficiency, we do not delegate to the functions above but instead --# duplicate the functionality here. 
--eval 'func_dirname_and_basename () --{ -- $debug_cmd -+# Sed substitution that converts a w32 file name or path -+# which contains forward slashes, into one that contains -+# (escaped) backslashes. A very naive implementation. -+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' - -- '"$_b"' -- '"$_d"' --}' -+# Re-`\' parameter expansions in output of double_quote_subst that were -+# `\'-ed in input to the same. If an odd number of `\' preceded a '$' -+# in input to double_quote_subst, that '$' was protected from expansion. -+# Since each input `\' is now two `\'s, look for any number of runs of -+# four `\'s followed by two `\'s and then a '$'. `\' that '$'. -+bs='\\' -+bs2='\\\\' -+bs4='\\\\\\\\' -+dollar='\$' -+sed_double_backslash="\ -+ s/$bs4/&\\ -+/g -+ s/^$bs2$dollar/$bs&/ -+ s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g -+ s/\n//g" - -+# Standard options: -+opt_dry_run=false -+opt_help=false -+opt_quiet=false -+opt_verbose=false -+opt_warning=: - --# func_echo ARG... --# ---------------- --# Echo program name prefixed message. -+# func_echo arg... -+# Echo program name prefixed message, along with the current mode -+# name if it has been set yet. - func_echo () - { -- $debug_cmd -+ $ECHO "$progname: ${opt_mode+$opt_mode: }$*" -+} - -- _G_message=$* -+# func_verbose arg... -+# Echo program name prefixed message in verbose mode only. -+func_verbose () -+{ -+ $opt_verbose && func_echo ${1+"$@"} - -- func_echo_IFS=$IFS -- IFS=$nl -- for _G_line in $_G_message; do -- IFS=$func_echo_IFS -- $ECHO "$progname: $_G_line" -- done -- IFS=$func_echo_IFS -+ # A bug in bash halts the script if the last line of a function -+ # fails when set -e is in force, so we need another command to -+ # work around that: -+ : - } - -- --# func_echo_all ARG... --# -------------------- -+# func_echo_all arg... - # Invoke $ECHO with all args, space-separated. - func_echo_all () - { - $ECHO "$*" - } - -- --# func_echo_infix_1 INFIX ARG... --# ------------------------------ --# Echo program name, followed by INFIX on the first line, with any --# additional lines not showing INFIX. --func_echo_infix_1 () --{ -- $debug_cmd -- -- $require_term_colors -- -- _G_infix=$1; shift -- _G_indent=$_G_infix -- _G_prefix="$progname: $_G_infix: " -- _G_message=$* -- -- # Strip color escape sequences before counting printable length -- for _G_tc in "$tc_reset" "$tc_bold" "$tc_standout" "$tc_red" "$tc_green" "$tc_blue" "$tc_cyan" -- do -- test -n "$_G_tc" && { -- _G_esc_tc=`$ECHO "$_G_tc" | $SED "$sed_make_literal_regex"` -- _G_indent=`$ECHO "$_G_indent" | $SED "s|$_G_esc_tc||g"` -- } -- done -- _G_indent="$progname: "`echo "$_G_indent" | $SED 's|.| |g'`" " ## exclude from sc_prohibit_nested_quotes -- -- func_echo_infix_1_IFS=$IFS -- IFS=$nl -- for _G_line in $_G_message; do -- IFS=$func_echo_infix_1_IFS -- $ECHO "$_G_prefix$tc_bold$_G_line$tc_reset" >&2 -- _G_prefix=$_G_indent -- done -- IFS=$func_echo_infix_1_IFS --} -- -- --# func_error ARG... --# ----------------- -+# func_error arg... - # Echo program name prefixed message to standard error. - func_error () - { -- $debug_cmd -+ $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2 -+} - -- $require_term_colors -+# func_warning arg... -+# Echo program name prefixed warning message to standard error. -+func_warning () -+{ -+ $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2 - -- func_echo_infix_1 " $tc_standout${tc_red}error$tc_reset" "$*" >&2 -+ # bash bug again: -+ : - } - -- --# func_fatal_error ARG... 
--# ----------------------- -+# func_fatal_error arg... - # Echo program name prefixed message to standard error, and exit. - func_fatal_error () - { -- $debug_cmd -- -- func_error "$*" -+ func_error ${1+"$@"} - exit $EXIT_FAILURE - } - -+# func_fatal_help arg... -+# Echo program name prefixed message to standard error, followed by -+# a help hint, and exit. -+func_fatal_help () -+{ -+ func_error ${1+"$@"} -+ func_fatal_error "$help" -+} -+help="Try \`$progname --help' for more information." ## default -+ - --# func_grep EXPRESSION FILENAME --# ----------------------------- -+# func_grep expression filename - # Check whether EXPRESSION matches any line of FILENAME, without output. - func_grep () - { -- $debug_cmd -- - $GREP "$1" "$2" >/dev/null 2>&1 - } - - --# func_len STRING --# --------------- --# Set func_len_result to the length of STRING. STRING may not --# start with a hyphen. -- test -z "$_G_HAVE_XSI_OPS" \ -- && (eval 'x=a/b/c; -- test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \ -- && _G_HAVE_XSI_OPS=yes -- --if test yes = "$_G_HAVE_XSI_OPS"; then -- eval 'func_len () -- { -- $debug_cmd -- -- func_len_result=${#1} -- }' --else -- func_len () -- { -- $debug_cmd -- -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` -- } --fi -- -- --# func_mkdir_p DIRECTORY-PATH --# --------------------------- -+# func_mkdir_p directory-path - # Make sure the entire path to DIRECTORY-PATH is available. - func_mkdir_p () - { -- $debug_cmd -- -- _G_directory_path=$1 -- _G_dir_list= -+ my_directory_path="$1" -+ my_dir_list= - -- if test -n "$_G_directory_path" && test : != "$opt_dry_run"; then -+ if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then - -- # Protect directory names starting with '-' -- case $_G_directory_path in -- -*) _G_directory_path=./$_G_directory_path ;; -+ # Protect directory names starting with `-' -+ case $my_directory_path in -+ -*) my_directory_path="./$my_directory_path" ;; - esac - - # While some portion of DIR does not yet exist... -- while test ! -d "$_G_directory_path"; do -+ while test ! -d "$my_directory_path"; do - # ...make a list in topmost first order. Use a colon delimited - # list incase some portion of path contains whitespace. -- _G_dir_list=$_G_directory_path:$_G_dir_list -+ my_dir_list="$my_directory_path:$my_dir_list" - - # If the last portion added has no slash in it, the list is done -- case $_G_directory_path in */*) ;; *) break ;; esac -+ case $my_directory_path in */*) ;; *) break ;; esac - - # ...otherwise throw away the child directory and loop -- _G_directory_path=`$ECHO "$_G_directory_path" | $SED -e "$sed_dirname"` -+ my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"` - done -- _G_dir_list=`$ECHO "$_G_dir_list" | $SED 's|:*$||'` -+ my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'` - -- func_mkdir_p_IFS=$IFS; IFS=: -- for _G_dir in $_G_dir_list; do -- IFS=$func_mkdir_p_IFS -- # mkdir can fail with a 'File exist' error if two processes -+ save_mkdir_p_IFS="$IFS"; IFS=':' -+ for my_dir in $my_dir_list; do -+ IFS="$save_mkdir_p_IFS" -+ # mkdir can fail with a `File exist' error if two processes - # try to create one of the directories concurrently. Don't - # stop in that case! -- $MKDIR "$_G_dir" 2>/dev/null || : -+ $MKDIR "$my_dir" 2>/dev/null || : - done -- IFS=$func_mkdir_p_IFS -+ IFS="$save_mkdir_p_IFS" - - # Bail out if we (or some other process) failed to create a directory. 
-- test -d "$_G_directory_path" || \ -- func_fatal_error "Failed to create '$1'" -+ test -d "$my_directory_path" || \ -+ func_fatal_error "Failed to create \`$1'" - fi - } - - --# func_mktempdir [BASENAME] --# ------------------------- -+# func_mktempdir [string] - # Make a temporary directory that won't clash with other running - # libtool processes, and avoids race conditions if possible. If --# given, BASENAME is the basename for that directory. -+# given, STRING is the basename for that directory. - func_mktempdir () - { -- $debug_cmd -+ my_template="${TMPDIR-/tmp}/${1-$progname}" - -- _G_template=${TMPDIR-/tmp}/${1-$progname} -- -- if test : = "$opt_dry_run"; then -+ if test "$opt_dry_run" = ":"; then - # Return a directory name, but don't create it in dry-run mode -- _G_tmpdir=$_G_template-$$ -+ my_tmpdir="${my_template}-$$" - else - - # If mktemp works, use that first and foremost -- _G_tmpdir=`mktemp -d "$_G_template-XXXXXXXX" 2>/dev/null` -+ my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` - -- if test ! -d "$_G_tmpdir"; then -+ if test ! -d "$my_tmpdir"; then - # Failing that, at least try and use $RANDOM to avoid a race -- _G_tmpdir=$_G_template-${RANDOM-0}$$ -+ my_tmpdir="${my_template}-${RANDOM-0}$$" - -- func_mktempdir_umask=`umask` -+ save_mktempdir_umask=`umask` - umask 0077 -- $MKDIR "$_G_tmpdir" -- umask $func_mktempdir_umask -+ $MKDIR "$my_tmpdir" -+ umask $save_mktempdir_umask - fi - - # If we're not in dry-run mode, bomb out on failure -- test -d "$_G_tmpdir" || \ -- func_fatal_error "cannot create temporary directory '$_G_tmpdir'" -+ test -d "$my_tmpdir" || \ -+ func_fatal_error "cannot create temporary directory \`$my_tmpdir'" - fi - -- $ECHO "$_G_tmpdir" -+ $ECHO "$my_tmpdir" - } - - --# func_normal_abspath PATH --# ------------------------ --# Remove doubled-up and trailing slashes, "." path components, --# and cancel out any ".." path components in PATH after making --# it an absolute path. --func_normal_abspath () -+# func_quote_for_eval arg -+# Aesthetically quote ARG to be evaled later. -+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT -+# is double-quoted, suitable for a subsequent eval, whereas -+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters -+# which are still active within double quotes backslashified. -+func_quote_for_eval () - { -- $debug_cmd -+ case $1 in -+ *[\\\`\"\$]*) -+ func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;; -+ *) -+ func_quote_for_eval_unquoted_result="$1" ;; -+ esac - -- # These SED scripts presuppose an absolute path with a trailing slash. -- _G_pathcar='s|^/\([^/]*\).*$|\1|' -- _G_pathcdr='s|^/[^/]*||' -- _G_removedotparts=':dotsl -- s|/\./|/|g -- t dotsl -- s|/\.$|/|' -- _G_collapseslashes='s|/\{1,\}|/|g' -- _G_finalslash='s|/*$|/|' -- -- # Start from root dir and reassemble the path. -- func_normal_abspath_result= -- func_normal_abspath_tpath=$1 -- func_normal_abspath_altnamespace= -- case $func_normal_abspath_tpath in -- "") -- # Empty path, that just means $cwd. -- func_stripname '' '/' "`pwd`" -- func_normal_abspath_result=$func_stripname_result -- return -- ;; -- # The next three entries are used to spot a run of precisely -- # two leading slashes without using negated character classes; -- # we take advantage of case's first-match behaviour. -- ///*) -- # Unusual form of absolute path, do nothing. 
-- ;; -- //*) -- # Not necessarily an ordinary path; POSIX reserves leading '//' -- # and for example Cygwin uses it to access remote file shares -- # over CIFS/SMB, so we conserve a leading double slash if found. -- func_normal_abspath_altnamespace=/ -- ;; -- /*) -- # Absolute path, do nothing. -+ case $func_quote_for_eval_unquoted_result in -+ # Double-quote args containing shell metacharacters to delay -+ # word splitting, command substitution and and variable -+ # expansion for a subsequent eval. -+ # Many Bourne shells cannot handle close brackets correctly -+ # in scan sets, so we specify it separately. -+ *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") -+ func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\"" - ;; - *) -- # Relative path, prepend $cwd. -- func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath -- ;; -+ func_quote_for_eval_result="$func_quote_for_eval_unquoted_result" - esac -- -- # Cancel out all the simple stuff to save iterations. We also want -- # the path to end with a slash for ease of parsing, so make sure -- # there is one (and only one) here. -- func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -- -e "$_G_removedotparts" -e "$_G_collapseslashes" -e "$_G_finalslash"` -- while :; do -- # Processed it all yet? -- if test / = "$func_normal_abspath_tpath"; then -- # If we ascended to the root using ".." the result may be empty now. -- if test -z "$func_normal_abspath_result"; then -- func_normal_abspath_result=/ -- fi -- break -- fi -- func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \ -- -e "$_G_pathcar"` -- func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -- -e "$_G_pathcdr"` -- # Figure out what to do with it -- case $func_normal_abspath_tcomponent in -- "") -- # Trailing empty path component, ignore it. -- ;; -- ..) -- # Parent dir; strip last assembled component from result. -- func_dirname "$func_normal_abspath_result" -- func_normal_abspath_result=$func_dirname_result -- ;; -- *) -- # Actual path component, append it. -- func_append func_normal_abspath_result "/$func_normal_abspath_tcomponent" -- ;; -- esac -- done -- # Restore leading double-slash if one was found on entry. -- func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result --} -- -- --# func_notquiet ARG... --# -------------------- --# Echo program name prefixed message only when not in quiet mode. --func_notquiet () --{ -- $debug_cmd -- -- $opt_quiet || func_echo ${1+"$@"} -- -- # A bug in bash halts the script if the last line of a function -- # fails when set -e is in force, so we need another command to -- # work around that: -- : --} -- -- --# func_relative_path SRCDIR DSTDIR --# -------------------------------- --# Set func_relative_path_result to the relative path from SRCDIR to DSTDIR. 
--func_relative_path () --{ -- $debug_cmd -- -- func_relative_path_result= -- func_normal_abspath "$1" -- func_relative_path_tlibdir=$func_normal_abspath_result -- func_normal_abspath "$2" -- func_relative_path_tbindir=$func_normal_abspath_result -- -- # Ascend the tree starting from libdir -- while :; do -- # check if we have found a prefix of bindir -- case $func_relative_path_tbindir in -- $func_relative_path_tlibdir) -- # found an exact match -- func_relative_path_tcancelled= -- break -- ;; -- $func_relative_path_tlibdir*) -- # found a matching prefix -- func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir" -- func_relative_path_tcancelled=$func_stripname_result -- if test -z "$func_relative_path_result"; then -- func_relative_path_result=. -- fi -- break -- ;; -- *) -- func_dirname $func_relative_path_tlibdir -- func_relative_path_tlibdir=$func_dirname_result -- if test -z "$func_relative_path_tlibdir"; then -- # Have to descend all the way to the root! -- func_relative_path_result=../$func_relative_path_result -- func_relative_path_tcancelled=$func_relative_path_tbindir -- break -- fi -- func_relative_path_result=../$func_relative_path_result -- ;; -- esac -- done -- -- # Now calculate path; take care to avoid doubling-up slashes. -- func_stripname '' '/' "$func_relative_path_result" -- func_relative_path_result=$func_stripname_result -- func_stripname '/' '/' "$func_relative_path_tcancelled" -- if test -n "$func_stripname_result"; then -- func_append func_relative_path_result "/$func_stripname_result" -- fi -- -- # Normalisation. If bindir is libdir, return '.' else relative path. -- if test -n "$func_relative_path_result"; then -- func_stripname './' '' "$func_relative_path_result" -- func_relative_path_result=$func_stripname_result -- fi -- -- test -n "$func_relative_path_result" || func_relative_path_result=. -- -- : - } - - --# func_quote_for_eval ARG... --# -------------------------- --# Aesthetically quote ARGs to be evaled later. --# This function returns two values: --# i) func_quote_for_eval_result --# double-quoted, suitable for a subsequent eval --# ii) func_quote_for_eval_unquoted_result --# has all characters that are still active within double --# quotes backslashified. --func_quote_for_eval () --{ -- $debug_cmd -- -- func_quote_for_eval_unquoted_result= -- func_quote_for_eval_result= -- while test 0 -lt $#; do -- case $1 in -- *[\\\`\"\$]*) -- _G_unquoted_arg=`printf '%s\n' "$1" |$SED "$sed_quote_subst"` ;; -- *) -- _G_unquoted_arg=$1 ;; -- esac -- if test -n "$func_quote_for_eval_unquoted_result"; then -- func_append func_quote_for_eval_unquoted_result " $_G_unquoted_arg" -- else -- func_append func_quote_for_eval_unquoted_result "$_G_unquoted_arg" -- fi -- -- case $_G_unquoted_arg in -- # Double-quote args containing shell metacharacters to delay -- # word splitting, command substitution and variable expansion -- # for a subsequent eval. -- # Many Bourne shells cannot handle close brackets correctly -- # in scan sets, so we specify it separately. 
-- *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") -- _G_quoted_arg=\"$_G_unquoted_arg\" -- ;; -- *) -- _G_quoted_arg=$_G_unquoted_arg -- ;; -- esac -- -- if test -n "$func_quote_for_eval_result"; then -- func_append func_quote_for_eval_result " $_G_quoted_arg" -- else -- func_append func_quote_for_eval_result "$_G_quoted_arg" -- fi -- shift -- done --} -- -- --# func_quote_for_expand ARG --# ------------------------- -+# func_quote_for_expand arg - # Aesthetically quote ARG to be evaled later; same as above, - # but do not quote variable references. - func_quote_for_expand () - { -- $debug_cmd -- - case $1 in - *[\\\`\"]*) -- _G_arg=`$ECHO "$1" | $SED \ -- -e "$sed_double_quote_subst" -e "$sed_double_backslash"` ;; -+ my_arg=`$ECHO "$1" | $SED \ -+ -e "$double_quote_subst" -e "$sed_double_backslash"` ;; - *) -- _G_arg=$1 ;; -+ my_arg="$1" ;; - esac - -- case $_G_arg in -+ case $my_arg in - # Double-quote args containing shell metacharacters to delay - # word splitting and command substitution for a subsequent eval. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, so we specify it separately. - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") -- _G_arg=\"$_G_arg\" -+ my_arg="\"$my_arg\"" - ;; - esac - -- func_quote_for_expand_result=$_G_arg -+ func_quote_for_expand_result="$my_arg" - } - - --# func_stripname PREFIX SUFFIX NAME --# --------------------------------- --# strip PREFIX and SUFFIX from NAME, and store in func_stripname_result. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --if test yes = "$_G_HAVE_XSI_OPS"; then -- eval 'func_stripname () -- { -- $debug_cmd -- -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary variable first. -- func_stripname_result=$3 -- func_stripname_result=${func_stripname_result#"$1"} -- func_stripname_result=${func_stripname_result%"$2"} -- }' --else -- func_stripname () -- { -- $debug_cmd -- -- case $2 in -- .*) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%\\\\$2\$%%"`;; -- *) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%$2\$%%"`;; -- esac -- } --fi -- -- --# func_show_eval CMD [FAIL_EXP] --# ----------------------------- --# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is -+# func_show_eval cmd [fail_exp] -+# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is - # not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP - # is given, then evaluate it. - func_show_eval () - { -- $debug_cmd -- -- _G_cmd=$1 -- _G_fail_exp=${2-':'} -+ my_cmd="$1" -+ my_fail_exp="${2-:}" - -- func_quote_for_expand "$_G_cmd" -- eval "func_notquiet $func_quote_for_expand_result" -+ ${opt_silent-false} || { -+ func_quote_for_expand "$my_cmd" -+ eval "func_echo $func_quote_for_expand_result" -+ } - -- $opt_dry_run || { -- eval "$_G_cmd" -- _G_status=$? -- if test 0 -ne "$_G_status"; then -- eval "(exit $_G_status); $_G_fail_exp" -+ if ${opt_dry_run-false}; then :; else -+ eval "$my_cmd" -+ my_status=$? -+ if test "$my_status" -eq 0; then :; else -+ eval "(exit $my_status); $my_fail_exp" - fi -- } -+ fi - } - - --# func_show_eval_locale CMD [FAIL_EXP] --# ------------------------------------ --# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is -+# func_show_eval_locale cmd [fail_exp] -+# Unless opt_silent is true, then output CMD. 
Then, if opt_dryrun is - # not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP - # is given, then evaluate it. Use the saved locale for evaluation. - func_show_eval_locale () - { -- $debug_cmd -- -- _G_cmd=$1 -- _G_fail_exp=${2-':'} -+ my_cmd="$1" -+ my_fail_exp="${2-:}" - -- $opt_quiet || { -- func_quote_for_expand "$_G_cmd" -+ ${opt_silent-false} || { -+ func_quote_for_expand "$my_cmd" - eval "func_echo $func_quote_for_expand_result" - } - -- $opt_dry_run || { -- eval "$_G_user_locale -- $_G_cmd" -- _G_status=$? -- eval "$_G_safe_locale" -- if test 0 -ne "$_G_status"; then -- eval "(exit $_G_status); $_G_fail_exp" -+ if ${opt_dry_run-false}; then :; else -+ eval "$lt_user_locale -+ $my_cmd" -+ my_status=$? -+ eval "$lt_safe_locale" -+ if test "$my_status" -eq 0; then :; else -+ eval "(exit $my_status); $my_fail_exp" - fi -- } -+ fi - } - -- - # func_tr_sh --# ---------- - # Turn $1 into a string suitable for a shell variable name. - # Result is stored in $func_tr_sh_result. All characters - # not in the set a-zA-Z0-9_ are replaced with '_'. Further, - # if $1 begins with a digit, a '_' is prepended as well. - func_tr_sh () - { -- $debug_cmd -- -- case $1 in -- [0-9]* | *[!a-zA-Z0-9_]*) -- func_tr_sh_result=`$ECHO "$1" | $SED -e 's/^\([0-9]\)/_\1/' -e 's/[^a-zA-Z0-9_]/_/g'` -- ;; -- * ) -- func_tr_sh_result=$1 -- ;; -- esac --} -- -- --# func_verbose ARG... --# ------------------- --# Echo program name prefixed message in verbose mode only. --func_verbose () --{ -- $debug_cmd -- -- $opt_verbose && func_echo "$*" -- -- : --} -- -- --# func_warn_and_continue ARG... --# ----------------------------- --# Echo program name prefixed warning message to standard error. --func_warn_and_continue () --{ -- $debug_cmd -- -- $require_term_colors -- -- func_echo_infix_1 "${tc_red}warning$tc_reset" "$*" >&2 --} -- -- --# func_warning CATEGORY ARG... --# ---------------------------- --# Echo program name prefixed warning message to standard error. Warning --# messages can be filtered according to CATEGORY, where this function --# elides messages where CATEGORY is not listed in the global variable --# 'opt_warning_types'. --func_warning () --{ -- $debug_cmd -- -- # CATEGORY must be in the warning_categories list! -- case " $warning_categories " in -- *" $1 "*) ;; -- *) func_internal_error "invalid warning category '$1'" ;; -- esac -- -- _G_category=$1 -- shift -- -- case " $opt_warning_types " in -- *" $_G_category "*) $warning_func ${1+"$@"} ;; -- esac --} -- -- --# func_sort_ver VER1 VER2 --# ----------------------- --# 'sort -V' is not generally available. --# Note this deviates from the version comparison in automake --# in that it treats 1.5 < 1.5.0, and treats 1.4.4a < 1.4-p3a --# but this should suffice as we won't be specifying old --# version formats or redundant trailing .0 in bootstrap.conf. --# If we did want full compatibility then we should probably --# use m4_version_compare from autoconf. --func_sort_ver () --{ -- $debug_cmd -- -- printf '%s\n%s\n' "$1" "$2" \ -- | sort -t. -k 1,1n -k 2,2n -k 3,3n -k 4,4n -k 5,5n -k 6,6n -k 7,7n -k 8,8n -k 9,9n --} -- --# func_lt_ver PREV CURR --# --------------------- --# Return true if PREV and CURR are in the correct order according to --# func_sort_ver, otherwise false. Use it like this: --# --# func_lt_ver "$prev_ver" "$proposed_ver" || func_fatal_error "..." 
--func_lt_ver () --{ -- $debug_cmd -- -- test "x$1" = x`func_sort_ver "$1" "$2" | $SED 1q` --} -- -- --# Local variables: --# mode: shell-script --# sh-indentation: 2 --# eval: (add-hook 'before-save-hook 'time-stamp) --# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC" --# time-stamp-time-zone: "UTC" --# End: --#! /bin/sh -- --# Set a version string for this script. --scriptversion=2014-01-07.03; # UTC -- --# A portable, pluggable option parser for Bourne shell. --# Written by Gary V. Vaughan, 2010 -- --# Copyright (C) 2010-2015 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# This program is free software: you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation, either version 3 of the License, or --# (at your option) any later version. -- --# This program is distributed in the hope that it will be useful, --# but WITHOUT ANY WARRANTY; without even the implied warranty of --# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --# GNU General Public License for more details. -- --# You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -- --# Please report bugs or propose patches to gary@gnu.org. -- -- --## ------ ## --## Usage. ## --## ------ ## -- --# This file is a library for parsing options in your shell scripts along --# with assorted other useful supporting features that you can make use --# of too. --# --# For the simplest scripts you might need only: --# --# #!/bin/sh --# . relative/path/to/funclib.sh --# . relative/path/to/options-parser --# scriptversion=1.0 --# func_options ${1+"$@"} --# eval set dummy "$func_options_result"; shift --# ...rest of your script... --# --# In order for the '--version' option to work, you will need to have a --# suitably formatted comment like the one at the top of this file --# starting with '# Written by ' and ending with '# warranty; '. --# --# For '-h' and '--help' to work, you will also need a one line --# description of your script's purpose in a comment directly above the --# '# Written by ' line, like the one at the top of this file. --# --# The default options also support '--debug', which will turn on shell --# execution tracing (see the comment above debug_cmd below for another --# use), and '--verbose' and the func_verbose function to allow your script --# to display verbose messages only when your user has specified --# '--verbose'. --# --# After sourcing this file, you can plug processing for additional --# options by amending the variables from the 'Configuration' section --# below, and following the instructions in the 'Option parsing' --# section further down. -- --## -------------- ## --## Configuration. ## --## -------------- ## -- --# You should override these variables in your script after sourcing this --# file so that they reflect the customisations you have added to the --# option parser. -- --# The usage line for option parsing errors and the start of '-h' and --# '--help' output messages. You can embed shell variables for delayed --# expansion at the time the message is displayed, but you will need to --# quote other shell meta-characters carefully to prevent them being --# expanded when the contents are evaled. --usage='$progpath [OPTION]...' 
-- --# Short help message in response to '-h' and '--help'. Add to this or --# override it after sourcing this library to reflect the full set of --# options your script accepts. --usage_message="\ -- --debug enable verbose shell tracing -- -W, --warnings=CATEGORY -- report the warnings falling in CATEGORY [all] -- -v, --verbose verbosely report processing -- --version print version information and exit -- -h, --help print short or long help message and exit --" -- --# Additional text appended to 'usage_message' in response to '--help'. --long_help_message=" --Warning categories include: -- 'all' show all warnings -- 'none' turn off all the warnings -- 'error' warnings are treated as fatal errors" -- --# Help message printed before fatal option parsing errors. --fatal_help="Try '\$progname --help' for more information." -- -- -- --## ------------------------- ## --## Hook function management. ## --## ------------------------- ## -- --# This section contains functions for adding, removing, and running hooks --# to the main code. A hook is just a named list of of function, that can --# be run in order later on. -- --# func_hookable FUNC_NAME --# ----------------------- --# Declare that FUNC_NAME will run hooks added with --# 'func_add_hook FUNC_NAME ...'. --func_hookable () --{ -- $debug_cmd -- -- func_append hookable_fns " $1" --} -- -- --# func_add_hook FUNC_NAME HOOK_FUNC --# --------------------------------- --# Request that FUNC_NAME call HOOK_FUNC before it returns. FUNC_NAME must --# first have been declared "hookable" by a call to 'func_hookable'. --func_add_hook () --{ -- $debug_cmd -- -- case " $hookable_fns " in -- *" $1 "*) ;; -- *) func_fatal_error "'$1' does not accept hook functions." ;; -- esac -- -- eval func_append ${1}_hooks '" $2"' --} -- -- --# func_remove_hook FUNC_NAME HOOK_FUNC --# ------------------------------------ --# Remove HOOK_FUNC from the list of functions called by FUNC_NAME. --func_remove_hook () --{ -- $debug_cmd -- -- eval ${1}_hooks='`$ECHO "\$'$1'_hooks" |$SED "s| '$2'||"`' --} -- -- --# func_run_hooks FUNC_NAME [ARG]... --# --------------------------------- --# Run all hook functions registered to FUNC_NAME. --# It is assumed that the list of hook functions contains nothing more --# than a whitespace-delimited list of legal shell function names, and --# no effort is wasted trying to catch shell meta-characters or preserve --# whitespace. --func_run_hooks () --{ -- $debug_cmd -- -- case " $hookable_fns " in -- *" $1 "*) ;; -- *) func_fatal_error "'$1' does not support hook funcions.n" ;; -- esac -- -- eval _G_hook_fns=\$$1_hooks; shift -- -- for _G_hook in $_G_hook_fns; do -- eval $_G_hook '"$@"' -- -- # store returned options list back into positional -- # parameters for next 'cmd' execution. -- eval _G_hook_result=\$${_G_hook}_result -- eval set dummy "$_G_hook_result"; shift -- done -- -- func_quote_for_eval ${1+"$@"} -- func_run_hooks_result=$func_quote_for_eval_result --} -- -- -- --## --------------- ## --## Option parsing. ## --## --------------- ## -- --# In order to add your own option parsing hooks, you must accept the --# full positional parameter list in your hook function, remove any --# options that you action, and then pass back the remaining unprocessed --# options in '<hooked_function_name>_result', escaped suitably for --# 'eval'. Like this: --# --# my_options_prep () --# { --# $debug_cmd --# --# # Extend the existing usage message. 
--# usage_message=$usage_message' --# -s, --silent don'\''t print informational messages --# ' --# --# func_quote_for_eval ${1+"$@"} --# my_options_prep_result=$func_quote_for_eval_result --# } --# func_add_hook func_options_prep my_options_prep --# --# --# my_silent_option () --# { --# $debug_cmd --# --# # Note that for efficiency, we parse as many options as we can --# # recognise in a loop before passing the remainder back to the --# # caller on the first unrecognised argument we encounter. --# while test $# -gt 0; do --# opt=$1; shift --# case $opt in --# --silent|-s) opt_silent=: ;; --# # Separate non-argument short options: --# -s*) func_split_short_opt "$_G_opt" --# set dummy "$func_split_short_opt_name" \ --# "-$func_split_short_opt_arg" ${1+"$@"} --# shift --# ;; --# *) set dummy "$_G_opt" "$*"; shift; break ;; --# esac --# done --# --# func_quote_for_eval ${1+"$@"} --# my_silent_option_result=$func_quote_for_eval_result --# } --# func_add_hook func_parse_options my_silent_option --# --# --# my_option_validation () --# { --# $debug_cmd --# --# $opt_silent && $opt_verbose && func_fatal_help "\ --# '--silent' and '--verbose' options are mutually exclusive." --# --# func_quote_for_eval ${1+"$@"} --# my_option_validation_result=$func_quote_for_eval_result --# } --# func_add_hook func_validate_options my_option_validation --# --# You'll alse need to manually amend $usage_message to reflect the extra --# options you parse. It's preferable to append if you can, so that --# multiple option parsing hooks can be added safely. -- -- --# func_options [ARG]... --# --------------------- --# All the functions called inside func_options are hookable. See the --# individual implementations for details. --func_hookable func_options --func_options () --{ -- $debug_cmd -- -- func_options_prep ${1+"$@"} -- eval func_parse_options \ -- ${func_options_prep_result+"$func_options_prep_result"} -- eval func_validate_options \ -- ${func_parse_options_result+"$func_parse_options_result"} -- -- eval func_run_hooks func_options \ -- ${func_validate_options_result+"$func_validate_options_result"} -- -- # save modified positional parameters for caller -- func_options_result=$func_run_hooks_result --} -- -- --# func_options_prep [ARG]... --# -------------------------- --# All initialisations required before starting the option parse loop. --# Note that when calling hook functions, we pass through the list of --# positional parameters. If a hook function modifies that list, and --# needs to propogate that back to rest of this script, then the complete --# modified list must be put in 'func_run_hooks_result' before --# returning. --func_hookable func_options_prep --func_options_prep () --{ -- $debug_cmd -- -- # Option defaults: -- opt_verbose=false -- opt_warning_types= -- -- func_run_hooks func_options_prep ${1+"$@"} -- -- # save modified positional parameters for caller -- func_options_prep_result=$func_run_hooks_result --} -- -- --# func_parse_options [ARG]... --# --------------------------- --# The main option parsing loop. --func_hookable func_parse_options --func_parse_options () --{ -- $debug_cmd -- -- func_parse_options_result= -- -- # this just eases exit handling -- while test $# -gt 0; do -- # Defer to hook functions for initial option parsing, so they -- # get priority in the event of reusing an option name. 
-- func_run_hooks func_parse_options ${1+"$@"} -- -- # Adjust func_parse_options positional parameters to match -- eval set dummy "$func_run_hooks_result"; shift -- -- # Break out of the loop if we already parsed every option. -- test $# -gt 0 || break -- -- _G_opt=$1 -- shift -- case $_G_opt in -- --debug|-x) debug_cmd='set -x' -- func_echo "enabling shell trace mode" -- $debug_cmd -- ;; -- -- --no-warnings|--no-warning|--no-warn) -- set dummy --warnings none ${1+"$@"} -- shift -- ;; -- -- --warnings|--warning|-W) -- test $# = 0 && func_missing_arg $_G_opt && break -- case " $warning_categories $1" in -- *" $1 "*) -- # trailing space prevents matching last $1 above -- func_append_uniq opt_warning_types " $1" -- ;; -- *all) -- opt_warning_types=$warning_categories -- ;; -- *none) -- opt_warning_types=none -- warning_func=: -- ;; -- *error) -- opt_warning_types=$warning_categories -- warning_func=func_fatal_error -- ;; -- *) -- func_fatal_error \ -- "unsupported warning category: '$1'" -- ;; -- esac -- shift -- ;; -- -- --verbose|-v) opt_verbose=: ;; -- --version) func_version ;; -- -\?|-h) func_usage ;; -- --help) func_help ;; -- -- # Separate optargs to long options (plugins may need this): -- --*=*) func_split_equals "$_G_opt" -- set dummy "$func_split_equals_lhs" \ -- "$func_split_equals_rhs" ${1+"$@"} -- shift -- ;; -- -- # Separate optargs to short options: -- -W*) -- func_split_short_opt "$_G_opt" -- set dummy "$func_split_short_opt_name" \ -- "$func_split_short_opt_arg" ${1+"$@"} -- shift -- ;; -- -- # Separate non-argument short options: -- -\?*|-h*|-v*|-x*) -- func_split_short_opt "$_G_opt" -- set dummy "$func_split_short_opt_name" \ -- "-$func_split_short_opt_arg" ${1+"$@"} -- shift -- ;; -- -- --) break ;; -- -*) func_fatal_help "unrecognised option: '$_G_opt'" ;; -- *) set dummy "$_G_opt" ${1+"$@"}; shift; break ;; -- esac -- done -- -- # save modified positional parameters for caller -- func_quote_for_eval ${1+"$@"} -- func_parse_options_result=$func_quote_for_eval_result -+ case $1 in -+ [0-9]* | *[!a-zA-Z0-9_]*) -+ func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'` -+ ;; -+ * ) -+ func_tr_sh_result=$1 -+ ;; -+ esac - } - - --# func_validate_options [ARG]... --# ------------------------------ --# Perform any sanity checks on option settings and/or unconsumed --# arguments. --func_hookable func_validate_options --func_validate_options () -+# func_version -+# Echo version message to standard output and exit. -+func_version () - { -- $debug_cmd -- -- # Display all warnings if -W was not given. -- test -n "$opt_warning_types" || opt_warning_types=" $warning_categories" -- -- func_run_hooks func_validate_options ${1+"$@"} -+ $opt_debug - -- # Bail if the options were screwed! -- $exit_cmd $EXIT_FAILURE -- -- # save modified positional parameters for caller -- func_validate_options_result=$func_run_hooks_result -+ $SED -n '/(C)/!b go -+ :more -+ /\./!{ -+ N -+ s/\n# / / -+ b more -+ } -+ :go -+ /^# '$PROGRAM' (GNU /,/# warranty; / { -+ s/^# // -+ s/^# *$// -+ s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/ -+ p -+ }' < "$progpath" -+ exit $? - } - -- -- --## ----------------- ## --## Helper functions. ## --## ----------------- ## -- --# This section contains the helper functions used by the rest of the --# hookable option parser framework in ascii-betical order. -- -- --# func_fatal_help ARG... --# ---------------------- --# Echo program name prefixed message to standard error, followed by --# a help hint, and exit. 
--func_fatal_help () -+# func_usage -+# Echo short help message to standard output and exit. -+func_usage () - { -- $debug_cmd -+ $opt_debug - -- eval \$ECHO \""Usage: $usage"\" -- eval \$ECHO \""$fatal_help"\" -- func_error ${1+"$@"} -- exit $EXIT_FAILURE -+ $SED -n '/^# Usage:/,/^# *.*--help/ { -+ s/^# // -+ s/^# *$// -+ s/\$progname/'$progname'/ -+ p -+ }' < "$progpath" -+ echo -+ $ECHO "run \`$progname --help | more' for full usage" -+ exit $? - } - -- --# func_help --# --------- --# Echo long help message to standard output and exit. -+# func_help [NOEXIT] -+# Echo long help message to standard output and exit, -+# unless 'noexit' is passed as argument. - func_help () - { -- $debug_cmd -+ $opt_debug - -- func_usage_message -- $ECHO "$long_help_message" -- exit 0 -+ $SED -n '/^# Usage:/,/# Report bugs to/ { -+ :print -+ s/^# // -+ s/^# *$// -+ s*\$progname*'$progname'* -+ s*\$host*'"$host"'* -+ s*\$SHELL*'"$SHELL"'* -+ s*\$LTCC*'"$LTCC"'* -+ s*\$LTCFLAGS*'"$LTCFLAGS"'* -+ s*\$LD*'"$LD"'* -+ s/\$with_gnu_ld/'"$with_gnu_ld"'/ -+ s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/ -+ s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/ -+ p -+ d -+ } -+ /^# .* home page:/b print -+ /^# General help using/b print -+ ' < "$progpath" -+ ret=$? -+ if test -z "$1"; then -+ exit $ret -+ fi - } - -- --# func_missing_arg ARGNAME --# ------------------------ -+# func_missing_arg argname - # Echo program name prefixed message to standard error and set global - # exit_cmd. - func_missing_arg () - { -- $debug_cmd -+ $opt_debug - -- func_error "Missing argument for '$1'." -+ func_error "missing argument for $1." - exit_cmd=exit - } - - --# func_split_equals STRING --# ------------------------ --# Set func_split_equals_lhs and func_split_equals_rhs shell variables after --# splitting STRING at the '=' sign. --test -z "$_G_HAVE_XSI_OPS" \ -- && (eval 'x=a/b/c; -- test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \ -- && _G_HAVE_XSI_OPS=yes -- --if test yes = "$_G_HAVE_XSI_OPS" --then -- # This is an XSI compatible shell, allowing a faster implementation... -- eval 'func_split_equals () -- { -- $debug_cmd -- -- func_split_equals_lhs=${1%%=*} -- func_split_equals_rhs=${1#*=} -- test "x$func_split_equals_lhs" = "x$1" \ -- && func_split_equals_rhs= -- }' --else -- # ...otherwise fall back to using expr, which is often a shell builtin. -- func_split_equals () -- { -- $debug_cmd -- -- func_split_equals_lhs=`expr "x$1" : 'x\([^=]*\)'` -- func_split_equals_rhs= -- test "x$func_split_equals_lhs" = "x$1" \ -- || func_split_equals_rhs=`expr "x$1" : 'x[^=]*=\(.*\)$'` -- } --fi #func_split_equals -- -- --# func_split_short_opt SHORTOPT --# ----------------------------- -+# func_split_short_opt shortopt - # Set func_split_short_opt_name and func_split_short_opt_arg shell - # variables after splitting SHORTOPT after the 2nd character. --if test yes = "$_G_HAVE_XSI_OPS" --then -- # This is an XSI compatible shell, allowing a faster implementation... -- eval 'func_split_short_opt () -- { -- $debug_cmd -- -- func_split_short_opt_arg=${1#??} -- func_split_short_opt_name=${1%"$func_split_short_opt_arg"} -- }' --else -- # ...otherwise fall back to using expr, which is often a shell builtin. 
-- func_split_short_opt () -- { -- $debug_cmd -- -- func_split_short_opt_name=`expr "x$1" : 'x-\(.\)'` -- func_split_short_opt_arg=`expr "x$1" : 'x-.\(.*\)$'` -- } --fi #func_split_short_opt -- -- --# func_usage --# ---------- --# Echo short help message to standard output and exit. --func_usage () -+func_split_short_opt () - { -- $debug_cmd -+ my_sed_short_opt='1s/^\(..\).*$/\1/;q' -+ my_sed_short_rest='1s/^..\(.*\)$/\1/;q' - -- func_usage_message -- $ECHO "Run '$progname --help |${PAGER-more}' for full usage" -- exit 0 --} -+ func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"` -+ func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"` -+} # func_split_short_opt may be replaced by extended shell implementation - - --# func_usage_message --# ------------------ --# Echo short help message to standard output. --func_usage_message () -+# func_split_long_opt longopt -+# Set func_split_long_opt_name and func_split_long_opt_arg shell -+# variables after splitting LONGOPT at the `=' sign. -+func_split_long_opt () - { -- $debug_cmd -+ my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q' -+ my_sed_long_arg='1s/^--[^=]*=//' - -- eval \$ECHO \""Usage: $usage"\" -- echo -- $SED -n 's|^# || -- /^Written by/{ -- x;p;x -- } -- h -- /^Written by/q' < "$progpath" -- echo -- eval \$ECHO \""$usage_message"\" --} -+ func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"` -+ func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"` -+} # func_split_long_opt may be replaced by extended shell implementation - -+exit_cmd=: - --# func_version --# ------------ --# Echo version message to standard output and exit. --func_version () --{ -- $debug_cmd - -- printf '%s\n' "$progname $scriptversion" -- $SED -n ' -- /(C)/!b go -- :more -- /\./!{ -- N -- s|\n# | | -- b more -- } -- :go -- /^# Written by /,/# warranty; / { -- s|^# || -- s|^# *$|| -- s|\((C)\)[ 0-9,-]*[ ,-]\([1-9][0-9]* \)|\1 \2| -- p -- } -- /^# Written by / { -- s|^# || -- p -- } -- /^warranty; /q' < "$progpath" - -- exit $? --} - - --# Local variables: --# mode: shell-script --# sh-indentation: 2 --# eval: (add-hook 'before-save-hook 'time-stamp) --# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC" --# time-stamp-time-zone: "UTC" --# End: -+magic="%%%MAGIC variable%%%" -+magic_exe="%%%MAGIC EXE variable%%%" - --# Set a version string. --scriptversion='(GNU libtool) 2.4.6' -+# Global variables. -+nonopt= -+preserve_args= -+lo2o="s/\\.lo\$/.${objext}/" -+o2lo="s/\\.${objext}\$/.lo/" -+extracted_archives= -+extracted_serial=0 - -+# If this variable is set in any of the actions, the command in it -+# will be execed at the end. This prevents here-documents from being -+# left over by shells. -+exec_cmd= - --# func_echo ARG... --# ---------------- --# Libtool also displays the current mode in messages, so override --# funclib.sh func_echo with this custom definition. --func_echo () -+# func_append var value -+# Append VALUE to the end of shell variable VAR. -+func_append () - { -- $debug_cmd -- -- _G_message=$* -- -- func_echo_IFS=$IFS -- IFS=$nl -- for _G_line in $_G_message; do -- IFS=$func_echo_IFS -- $ECHO "$progname${opt_mode+: $opt_mode}: $_G_line" -- done -- IFS=$func_echo_IFS --} -+ eval "${1}=\$${1}\${2}" -+} # func_append may be replaced by extended shell implementation - -- --# func_warning ARG... --# ------------------- --# Libtool warnings are not categorized, so override funclib.sh --# func_warning with this simpler definition. 
--func_warning () -+# func_append_quoted var value -+# Quote VALUE and append to the end of shell variable VAR, separated -+# by a space. -+func_append_quoted () - { -- $debug_cmd -+ func_quote_for_eval "${2}" -+ eval "${1}=\$${1}\\ \$func_quote_for_eval_result" -+} # func_append_quoted may be replaced by extended shell implementation - -- $warning_func ${1+"$@"} --} -- -- --## ---------------- ## --## Options parsing. ## --## ---------------- ## -- --# Hook in the functions to make sure our own options are parsed during --# the option parsing loop. -- --usage='$progpath [OPTION]... [MODE-ARG]...' -- --# Short help message in response to '-h'. --usage_message="Options: -- --config show all configuration variables -- --debug enable verbose shell tracing -- -n, --dry-run display commands without modifying any files -- --features display basic configuration information and exit -- --mode=MODE use operation mode MODE -- --no-warnings equivalent to '-Wnone' -- --preserve-dup-deps don't remove duplicate dependency libraries -- --quiet, --silent don't print informational messages -- --tag=TAG use configuration variables from tag TAG -- -v, --verbose print more informational messages than default -- --version print version information -- -W, --warnings=CATEGORY report the warnings falling in CATEGORY [all] -- -h, --help, --help-all print short, long, or detailed help message --" - --# Additional text appended to 'usage_message' in response to '--help'. --func_help () -+# func_arith arithmetic-term... -+func_arith () - { -- $debug_cmd -- -- func_usage_message -- $ECHO "$long_help_message -- --MODE must be one of the following: -- -- clean remove files from the build directory -- compile compile a source file into a libtool object -- execute automatically set library path, then run a program -- finish complete the installation of libtool libraries -- install install libraries or executables -- link create a library or an executable -- uninstall remove libraries from an installed directory -- --MODE-ARGS vary depending on the MODE. When passed as first option, --'--mode=MODE' may be abbreviated as 'MODE' or a unique abbreviation of that. --Try '$progname --help --mode=MODE' for a more detailed description of MODE. -- --When reporting a bug, please describe a test case to reproduce it and --include the following information: -- -- host-triplet: $host -- shell: $SHELL -- compiler: $LTCC -- compiler flags: $LTCFLAGS -- linker: $LD (gnu? $with_gnu_ld) -- version: $progname (GNU libtool) 2.4.6 -- automake: `($AUTOMAKE --version) 2>/dev/null |$SED 1q` -- autoconf: `($AUTOCONF --version) 2>/dev/null |$SED 1q` -- --Report bugs to <bug-libtool@gnu.org>. --GNU libtool home page: <http://www.gnu.org/software/libtool/>. --General help using GNU software: <http://www.gnu.org/gethelp/>." -- exit 0 --} -+ func_arith_result=`expr "${@}"` -+} # func_arith may be replaced by extended shell implementation - - --# func_lo2o OBJECT-NAME --# --------------------- --# Transform OBJECT-NAME from a '.lo' suffix to the platform specific --# object suffix. -+# func_len string -+# STRING may not start with a hyphen. 
-+func_len () -+{ -+ func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len` -+} # func_len may be replaced by extended shell implementation - --lo2o=s/\\.lo\$/.$objext/ --o2lo=s/\\.$objext\$/.lo/ - --if test yes = "$_G_HAVE_XSI_OPS"; then -- eval 'func_lo2o () -- { -- case $1 in -- *.lo) func_lo2o_result=${1%.lo}.$objext ;; -- * ) func_lo2o_result=$1 ;; -- esac -- }' -+# func_lo2o object -+func_lo2o () -+{ -+ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` -+} # func_lo2o may be replaced by extended shell implementation - -- # func_xform LIBOBJ-OR-SOURCE -- # --------------------------- -- # Transform LIBOBJ-OR-SOURCE from a '.o' or '.c' (or otherwise) -- # suffix to a '.lo' libtool-object suffix. -- eval 'func_xform () -- { -- func_xform_result=${1%.*}.lo -- }' --else -- # ...otherwise fall back to using sed. -- func_lo2o () -- { -- func_lo2o_result=`$ECHO "$1" | $SED "$lo2o"` -- } - -- func_xform () -- { -- func_xform_result=`$ECHO "$1" | $SED 's|\.[^.]*$|.lo|'` -- } --fi -+# func_xform libobj-or-source -+func_xform () -+{ -+ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` -+} # func_xform may be replaced by extended shell implementation - - --# func_fatal_configuration ARG... --# ------------------------------- -+# func_fatal_configuration arg... - # Echo program name prefixed message to standard error, followed by - # a configuration failure hint, and exit. - func_fatal_configuration () - { -- func__fatal_error ${1+"$@"} \ -- "See the $PACKAGE documentation for more information." \ -- "Fatal configuration error." -+ func_error ${1+"$@"} -+ func_error "See the $PACKAGE documentation for more information." -+ func_fatal_error "Fatal configuration error." - } - - - # func_config --# ----------- - # Display the configuration for all the tags in this script. - func_config () - { -@@ -2149,19 +915,17 @@ - exit $? - } - -- - # func_features --# ------------- - # Display the features supported by this script. - func_features () - { - echo "host: $host" -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - echo "enable shared libraries" - else - echo "disable shared libraries" - fi -- if test yes = "$build_old_libs"; then -+ if test "$build_old_libs" = yes; then - echo "enable static libraries" - else - echo "disable static libraries" -@@ -2170,297 +934,289 @@ - exit $? - } - -- --# func_enable_tag TAGNAME --# ----------------------- -+# func_enable_tag tagname - # Verify that TAGNAME is valid, and either flag an error and exit, or - # enable the TAGNAME tag. We also add TAGNAME to the global $taglist - # variable here. - func_enable_tag () - { -- # Global variable: -- tagname=$1 -+ # Global variable: -+ tagname="$1" - -- re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" -- re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" -- sed_extractcf=/$re_begincf/,/$re_endcf/p -- -- # Validate tagname. -- case $tagname in -- *[!-_A-Za-z0-9,/]*) -- func_fatal_error "invalid tag name: $tagname" -- ;; -- esac -+ re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" -+ re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" -+ sed_extractcf="/$re_begincf/,/$re_endcf/p" -+ -+ # Validate tagname. -+ case $tagname in -+ *[!-_A-Za-z0-9,/]*) -+ func_fatal_error "invalid tag name: $tagname" -+ ;; -+ esac - -- # Don't test for the "default" C tag, as we know it's -- # there but not specially marked. -- case $tagname in -- CC) ;; -+ # Don't test for the "default" C tag, as we know it's -+ # there but not specially marked. 
-+ case $tagname in -+ CC) ;; - *) -- if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then -- taglist="$taglist $tagname" -+ if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then -+ taglist="$taglist $tagname" - -- # Evaluate the configuration. Be careful to quote the path -- # and the sed script, to avoid splitting on whitespace, but -- # also don't use non-portable quotes within backquotes within -- # quotes we have to do it in 2 steps: -- extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` -- eval "$extractedcf" -- else -- func_error "ignoring unknown tag $tagname" -- fi -- ;; -- esac -+ # Evaluate the configuration. Be careful to quote the path -+ # and the sed script, to avoid splitting on whitespace, but -+ # also don't use non-portable quotes within backquotes within -+ # quotes we have to do it in 2 steps: -+ extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` -+ eval "$extractedcf" -+ else -+ func_error "ignoring unknown tag $tagname" -+ fi -+ ;; -+ esac - } - -- - # func_check_version_match --# ------------------------ - # Ensure that we are using m4 macros, and libtool script from the same - # release of libtool. - func_check_version_match () - { -- if test "$package_revision" != "$macro_revision"; then -- if test "$VERSION" != "$macro_version"; then -- if test -z "$macro_version"; then -- cat >&2 <<_LT_EOF -+ if test "$package_revision" != "$macro_revision"; then -+ if test "$VERSION" != "$macro_version"; then -+ if test -z "$macro_version"; then -+ cat >&2 <<_LT_EOF - $progname: Version mismatch error. This is $PACKAGE $VERSION, but the - $progname: definition of this LT_INIT comes from an older release. - $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION - $progname: and run autoconf again. - _LT_EOF -- else -- cat >&2 <<_LT_EOF -+ else -+ cat >&2 <<_LT_EOF - $progname: Version mismatch error. This is $PACKAGE $VERSION, but the - $progname: definition of this LT_INIT comes from $PACKAGE $macro_version. - $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION - $progname: and run autoconf again. - _LT_EOF -- fi -- else -- cat >&2 <<_LT_EOF -+ fi -+ else -+ cat >&2 <<_LT_EOF - $progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, - $progname: but the definition of this LT_INIT comes from revision $macro_revision. - $progname: You should recreate aclocal.m4 with macros from revision $package_revision - $progname: of $PACKAGE $VERSION and run autoconf again. - _LT_EOF -- fi -- -- exit $EXIT_MISMATCH - fi -+ -+ exit $EXIT_MISMATCH -+ fi - } - - --# libtool_options_prep [ARG]... --# ----------------------------- --# Preparation for options parsed by libtool. 
--libtool_options_prep () --{ -- $debug_mode -- -- # Option defaults: -- opt_config=false -- opt_dlopen= -- opt_dry_run=false -- opt_help=false -- opt_mode= -- opt_preserve_dup_deps=false -- opt_quiet=false -+# Shorthand for --mode=foo, only valid as the first argument -+case $1 in -+clean|clea|cle|cl) -+ shift; set dummy --mode clean ${1+"$@"}; shift -+ ;; -+compile|compil|compi|comp|com|co|c) -+ shift; set dummy --mode compile ${1+"$@"}; shift -+ ;; -+execute|execut|execu|exec|exe|ex|e) -+ shift; set dummy --mode execute ${1+"$@"}; shift -+ ;; -+finish|finis|fini|fin|fi|f) -+ shift; set dummy --mode finish ${1+"$@"}; shift -+ ;; -+install|instal|insta|inst|ins|in|i) -+ shift; set dummy --mode install ${1+"$@"}; shift -+ ;; -+link|lin|li|l) -+ shift; set dummy --mode link ${1+"$@"}; shift -+ ;; -+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) -+ shift; set dummy --mode uninstall ${1+"$@"}; shift -+ ;; -+esac - -- nonopt= -- preserve_args= - -- # Shorthand for --mode=foo, only valid as the first argument -- case $1 in -- clean|clea|cle|cl) -- shift; set dummy --mode clean ${1+"$@"}; shift -- ;; -- compile|compil|compi|comp|com|co|c) -- shift; set dummy --mode compile ${1+"$@"}; shift -- ;; -- execute|execut|execu|exec|exe|ex|e) -- shift; set dummy --mode execute ${1+"$@"}; shift -- ;; -- finish|finis|fini|fin|fi|f) -- shift; set dummy --mode finish ${1+"$@"}; shift -- ;; -- install|instal|insta|inst|ins|in|i) -- shift; set dummy --mode install ${1+"$@"}; shift -- ;; -- link|lin|li|l) -- shift; set dummy --mode link ${1+"$@"}; shift -- ;; -- uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) -- shift; set dummy --mode uninstall ${1+"$@"}; shift -- ;; -- esac - -- # Pass back the list of options. -- func_quote_for_eval ${1+"$@"} -- libtool_options_prep_result=$func_quote_for_eval_result --} --func_add_hook func_options_prep libtool_options_prep -+# Option defaults: -+opt_debug=: -+opt_dry_run=false -+opt_config=false -+opt_preserve_dup_deps=false -+opt_features=false -+opt_finish=false -+opt_help=false -+opt_help_all=false -+opt_silent=: -+opt_warning=: -+opt_verbose=: -+opt_silent=false -+opt_verbose=false - - --# libtool_parse_options [ARG]... --# --------------------------------- --# Provide handling for libtool specific options. --libtool_parse_options () -+# Parse options once, thoroughly. This comes as soon as possible in the -+# script to make things like `--version' happen as quickly as we can. - { -- $debug_cmd -- -- # Perform our own loop to consume as many options as possible in -- # each iteration. 
-- while test $# -gt 0; do -- _G_opt=$1 -- shift -- case $_G_opt in -- --dry-run|--dryrun|-n) -- opt_dry_run=: -- ;; -- -- --config) func_config ;; -- -- --dlopen|-dlopen) -- opt_dlopen="${opt_dlopen+$opt_dlopen --}$1" -- shift -- ;; -- -- --preserve-dup-deps) -- opt_preserve_dup_deps=: ;; -- -- --features) func_features ;; -- -- --finish) set dummy --mode finish ${1+"$@"}; shift ;; -- -- --help) opt_help=: ;; -- -- --help-all) opt_help=': help-all' ;; -- -- --mode) test $# = 0 && func_missing_arg $_G_opt && break -- opt_mode=$1 -- case $1 in -- # Valid mode arguments: -- clean|compile|execute|finish|install|link|relink|uninstall) ;; -- -- # Catch anything else as an error -- *) func_error "invalid argument for $_G_opt" -- exit_cmd=exit -- break -- ;; -- esac -- shift -- ;; -- -- --no-silent|--no-quiet) -- opt_quiet=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --no-warnings|--no-warning|--no-warn) -- opt_warning=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --no-verbose) -- opt_verbose=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --silent|--quiet) -- opt_quiet=: -- opt_verbose=false -- func_append preserve_args " $_G_opt" -- ;; -- -- --tag) test $# = 0 && func_missing_arg $_G_opt && break -- opt_tag=$1 -- func_append preserve_args " $_G_opt $1" -- func_enable_tag "$1" -- shift -- ;; -- -- --verbose|-v) opt_quiet=false -- opt_verbose=: -- func_append preserve_args " $_G_opt" -- ;; -+ # this just eases exit handling -+ while test $# -gt 0; do -+ opt="$1" -+ shift -+ case $opt in -+ --debug|-x) opt_debug='set -x' -+ func_echo "enabling shell trace mode" -+ $opt_debug -+ ;; -+ --dry-run|--dryrun|-n) -+ opt_dry_run=: -+ ;; -+ --config) -+ opt_config=: -+func_config -+ ;; -+ --dlopen|-dlopen) -+ optarg="$1" -+ opt_dlopen="${opt_dlopen+$opt_dlopen -+}$optarg" -+ shift -+ ;; -+ --preserve-dup-deps) -+ opt_preserve_dup_deps=: -+ ;; -+ --features) -+ opt_features=: -+func_features -+ ;; -+ --finish) -+ opt_finish=: -+set dummy --mode finish ${1+"$@"}; shift -+ ;; -+ --help) -+ opt_help=: -+ ;; -+ --help-all) -+ opt_help_all=: -+opt_help=': help-all' -+ ;; -+ --mode) -+ test $# = 0 && func_missing_arg $opt && break -+ optarg="$1" -+ opt_mode="$optarg" -+case $optarg in -+ # Valid mode arguments: -+ clean|compile|execute|finish|install|link|relink|uninstall) ;; -+ -+ # Catch anything else as an error -+ *) func_error "invalid argument for $opt" -+ exit_cmd=exit -+ break -+ ;; -+esac -+ shift -+ ;; -+ --no-silent|--no-quiet) -+ opt_silent=false -+func_append preserve_args " $opt" -+ ;; -+ --no-warning|--no-warn) -+ opt_warning=false -+func_append preserve_args " $opt" -+ ;; -+ --no-verbose) -+ opt_verbose=false -+func_append preserve_args " $opt" -+ ;; -+ --silent|--quiet) -+ opt_silent=: -+func_append preserve_args " $opt" -+ opt_verbose=false -+ ;; -+ --verbose|-v) -+ opt_verbose=: -+func_append preserve_args " $opt" -+opt_silent=false -+ ;; -+ --tag) -+ test $# = 0 && func_missing_arg $opt && break -+ optarg="$1" -+ opt_tag="$optarg" -+func_append preserve_args " $opt $optarg" -+func_enable_tag "$optarg" -+ shift -+ ;; -+ -+ -\?|-h) func_usage ;; -+ --help) func_help ;; -+ --version) func_version ;; -+ -+ # Separate optargs to long options: -+ --*=*) -+ func_split_long_opt "$opt" -+ set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"} -+ shift -+ ;; -+ -+ # Separate non-argument short options: -+ -\?*|-h*|-n*|-v*) -+ func_split_short_opt "$opt" -+ set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"} -+ shift -+ ;; 
-+ -+ --) break ;; -+ -*) func_fatal_help "unrecognized option \`$opt'" ;; -+ *) set dummy "$opt" ${1+"$@"}; shift; break ;; -+ esac -+ done - -- # An option not handled by this hook function: -- *) set dummy "$_G_opt" ${1+"$@"}; shift; break ;; -- esac -- done -+ # Validate options: - -+ # save first non-option argument -+ if test "$#" -gt 0; then -+ nonopt="$opt" -+ shift -+ fi - -- # save modified positional parameters for caller -- func_quote_for_eval ${1+"$@"} -- libtool_parse_options_result=$func_quote_for_eval_result --} --func_add_hook func_parse_options libtool_parse_options -+ # preserve --debug -+ test "$opt_debug" = : || func_append preserve_args " --debug" - -+ case $host in -+ *cygwin* | *mingw* | *pw32* | *cegcc*) -+ # don't eliminate duplications in $postdeps and $predeps -+ opt_duplicate_compiler_generated_deps=: -+ ;; -+ *) -+ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps -+ ;; -+ esac - -+ $opt_help || { -+ # Sanity checks first: -+ func_check_version_match - --# libtool_validate_options [ARG]... --# --------------------------------- --# Perform any sanity checks on option settings and/or unconsumed --# arguments. --libtool_validate_options () --{ -- # save first non-option argument -- if test 0 -lt $#; then -- nonopt=$1 -- shift -+ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then -+ func_fatal_configuration "not configured to build any kind of library" - fi - -- # preserve --debug -- test : = "$debug_cmd" || func_append preserve_args " --debug" -+ # Darwin sucks -+ eval std_shrext=\"$shrext_cmds\" - -- case $host in -- # Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452 -- # see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788 -- *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* | *os2*) -- # don't eliminate duplications in $postdeps and $predeps -- opt_duplicate_compiler_generated_deps=: -- ;; -- *) -- opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps -- ;; -- esac -+ # Only execute mode is allowed to have -dlopen flags. -+ if test -n "$opt_dlopen" && test "$opt_mode" != execute; then -+ func_error "unrecognized option \`-dlopen'" -+ $ECHO "$help" 1>&2 -+ exit $EXIT_FAILURE -+ fi - -- $opt_help || { -- # Sanity checks first: -- func_check_version_match -- -- test yes != "$build_libtool_libs" \ -- && test yes != "$build_old_libs" \ -- && func_fatal_configuration "not configured to build any kind of library" -- -- # Darwin sucks -- eval std_shrext=\"$shrext_cmds\" -- -- # Only execute mode is allowed to have -dlopen flags. -- if test -n "$opt_dlopen" && test execute != "$opt_mode"; then -- func_error "unrecognized option '-dlopen'" -- $ECHO "$help" 1>&2 -- exit $EXIT_FAILURE -- fi -- -- # Change the help message to a mode-specific one. -- generic_help=$help -- help="Try '$progname --help --mode=$opt_mode' for more information." -- } -+ # Change the help message to a mode-specific one. -+ generic_help="$help" -+ help="Try \`$progname --help --mode=$opt_mode' for more information." -+ } - -- # Pass back the unparsed argument list -- func_quote_for_eval ${1+"$@"} -- libtool_validate_options_result=$func_quote_for_eval_result --} --func_add_hook func_validate_options libtool_validate_options - -+ # Bail if the options were screwed -+ $exit_cmd $EXIT_FAILURE -+} - --# Process options as early as possible so that --help and --version --# can return quickly. --func_options ${1+"$@"} --eval set dummy "$func_options_result"; shift - - - -@@ -2468,52 +1224,24 @@ - ## Main. 
## - ## ----------- ## - --magic='%%%MAGIC variable%%%' --magic_exe='%%%MAGIC EXE variable%%%' -- --# Global variables. --extracted_archives= --extracted_serial=0 -- --# If this variable is set in any of the actions, the command in it --# will be execed at the end. This prevents here-documents from being --# left over by shells. --exec_cmd= -- -- --# A function that is used when there is no print builtin or printf. --func_fallback_echo () --{ -- eval 'cat <<_LTECHO_EOF --$1 --_LTECHO_EOF' --} -- --# func_generated_by_libtool --# True iff stdin has been generated by Libtool. This function is only --# a basic sanity check; it will hardly flush out determined imposters. --func_generated_by_libtool_p () --{ -- $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 --} -- - # func_lalib_p file --# True iff FILE is a libtool '.la' library or '.lo' object file. -+# True iff FILE is a libtool `.la' library or `.lo' object file. - # This function is only a basic sanity check; it will hardly flush out - # determined imposters. - func_lalib_p () - { - test -f "$1" && -- $SED -e 4q "$1" 2>/dev/null | func_generated_by_libtool_p -+ $SED -e 4q "$1" 2>/dev/null \ -+ | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 - } - - # func_lalib_unsafe_p file --# True iff FILE is a libtool '.la' library or '.lo' object file. -+# True iff FILE is a libtool `.la' library or `.lo' object file. - # This function implements the same check as func_lalib_p without - # resorting to external programs. To this end, it redirects stdin and - # closes it afterwards, without saving the original file descriptor. - # As a safety measure, use it only where a negative result would be --# fatal anyway. Works if 'file' does not exist. -+# fatal anyway. Works if `file' does not exist. - func_lalib_unsafe_p () - { - lalib_p=no -@@ -2521,13 +1249,13 @@ - for lalib_p_l in 1 2 3 4 - do - read lalib_p_line -- case $lalib_p_line in -+ case "$lalib_p_line" in - \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;; - esac - done - exec 0<&5 5<&- - fi -- test yes = "$lalib_p" -+ test "$lalib_p" = yes - } - - # func_ltwrapper_script_p file -@@ -2536,8 +1264,7 @@ - # determined imposters. - func_ltwrapper_script_p () - { -- test -f "$1" && -- $lt_truncate_bin < "$1" 2>/dev/null | func_generated_by_libtool_p -+ func_lalib_p "$1" - } - - # func_ltwrapper_executable_p file -@@ -2562,7 +1289,7 @@ - { - func_dirname_and_basename "$1" "" "." - func_stripname '' '.exe' "$func_basename_result" -- func_ltwrapper_scriptname_result=$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper -+ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" - } - - # func_ltwrapper_p file -@@ -2581,13 +1308,11 @@ - # FAIL_CMD may read-access the current command in variable CMD! - func_execute_cmds () - { -- $debug_cmd -- -+ $opt_debug - save_ifs=$IFS; IFS='~' - for cmd in $1; do -- IFS=$sp$nl -- eval cmd=\"$cmd\" - IFS=$save_ifs -+ eval cmd=\"$cmd\" - func_show_eval "$cmd" "${2-:}" - done - IFS=$save_ifs -@@ -2599,11 +1324,10 @@ - # Note that it is not necessary on cygwin/mingw to append a dot to - # FILE even if both FILE and FILE.exe exist: automatic-append-.exe - # behavior happens only for exec(3), not for open(2)! Also, sourcing --# 'FILE.' does not work on cygwin managed mounts. -+# `FILE.' does not work on cygwin managed mounts. - func_source () - { -- $debug_cmd -- -+ $opt_debug - case $1 in - */* | *\\*) . "$1" ;; - *) . 
"./$1" ;; -@@ -2630,10 +1354,10 @@ - # store the result into func_replace_sysroot_result. - func_replace_sysroot () - { -- case $lt_sysroot:$1 in -+ case "$lt_sysroot:$1" in - ?*:"$lt_sysroot"*) - func_stripname "$lt_sysroot" '' "$1" -- func_replace_sysroot_result='='$func_stripname_result -+ func_replace_sysroot_result="=$func_stripname_result" - ;; - *) - # Including no sysroot. -@@ -2650,8 +1374,7 @@ - # arg is usually of the form 'gcc ...' - func_infer_tag () - { -- $debug_cmd -- -+ $opt_debug - if test -n "$available_tags" && test -z "$tagname"; then - CC_quoted= - for arg in $CC; do -@@ -2670,7 +1393,7 @@ - for z in $available_tags; do - if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then - # Evaluate the configuration. -- eval "`$SED -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" -+ eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" - CC_quoted= - for arg in $CC; do - # Double-quote args containing other shell metacharacters. -@@ -2695,7 +1418,7 @@ - # line option must be used. - if test -z "$tagname"; then - func_echo "unable to infer tagged configuration" -- func_fatal_error "specify a tag with '--tag'" -+ func_fatal_error "specify a tag with \`--tag'" - # else - # func_verbose "using $tagname tagged configuration" - fi -@@ -2711,15 +1434,15 @@ - # but don't create it if we're doing a dry run. - func_write_libtool_object () - { -- write_libobj=$1 -- if test yes = "$build_libtool_libs"; then -- write_lobj=\'$2\' -+ write_libobj=${1} -+ if test "$build_libtool_libs" = yes; then -+ write_lobj=\'${2}\' - else - write_lobj=none - fi - -- if test yes = "$build_old_libs"; then -- write_oldobj=\'$3\' -+ if test "$build_old_libs" = yes; then -+ write_oldobj=\'${3}\' - else - write_oldobj=none - fi -@@ -2727,7 +1450,7 @@ - $opt_dry_run || { - cat >${write_libobj}T <<EOF - # $write_libobj - a libtool object file --# Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - # - # Please DO NOT delete this file! - # It is necessary for linking the library. -@@ -2739,7 +1462,7 @@ - non_pic_object=$write_oldobj - - EOF -- $MV "${write_libobj}T" "$write_libobj" -+ $MV "${write_libobj}T" "${write_libobj}" - } - } - -@@ -2759,9 +1482,8 @@ - # be empty on error (or when ARG is empty) - func_convert_core_file_wine_to_w32 () - { -- $debug_cmd -- -- func_convert_core_file_wine_to_w32_result=$1 -+ $opt_debug -+ func_convert_core_file_wine_to_w32_result="$1" - if test -n "$1"; then - # Unfortunately, winepath does not exit with a non-zero error code, so we - # are forced to check the contents of stdout. On the other hand, if the -@@ -2769,9 +1491,9 @@ - # *an error message* to stdout. So we must check for both error code of - # zero AND non-empty stdout, which explains the odd construction: - func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null` -- if test "$?" -eq 0 && test -n "$func_convert_core_file_wine_to_w32_tmp"; then -+ if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then - func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" | -- $SED -e "$sed_naive_backslashify"` -+ $SED -e "$lt_sed_naive_backslashify"` - else - func_convert_core_file_wine_to_w32_result= - fi -@@ -2792,19 +1514,18 @@ - # are convertible, then the result may be empty. 
- func_convert_core_path_wine_to_w32 () - { -- $debug_cmd -- -+ $opt_debug - # unfortunately, winepath doesn't convert paths, only file names -- func_convert_core_path_wine_to_w32_result= -+ func_convert_core_path_wine_to_w32_result="" - if test -n "$1"; then - oldIFS=$IFS - IFS=: - for func_convert_core_path_wine_to_w32_f in $1; do - IFS=$oldIFS - func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f" -- if test -n "$func_convert_core_file_wine_to_w32_result"; then -+ if test -n "$func_convert_core_file_wine_to_w32_result" ; then - if test -z "$func_convert_core_path_wine_to_w32_result"; then -- func_convert_core_path_wine_to_w32_result=$func_convert_core_file_wine_to_w32_result -+ func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result" - else - func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result" - fi -@@ -2833,8 +1554,7 @@ - # environment variable; do not put it in $PATH. - func_cygpath () - { -- $debug_cmd -- -+ $opt_debug - if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then - func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` - if test "$?" -ne 0; then -@@ -2843,7 +1563,7 @@ - fi - else - func_cygpath_result= -- func_error "LT_CYGPATH is empty or specifies non-existent file: '$LT_CYGPATH'" -+ func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'" - fi - } - #end: func_cygpath -@@ -2854,11 +1574,10 @@ - # result in func_convert_core_msys_to_w32_result. - func_convert_core_msys_to_w32 () - { -- $debug_cmd -- -+ $opt_debug - # awkward: cmd appends spaces to result - func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null | -- $SED -e 's/[ ]*$//' -e "$sed_naive_backslashify"` -+ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` - } - #end: func_convert_core_msys_to_w32 - -@@ -2869,14 +1588,13 @@ - # func_to_host_file_result to ARG1). - func_convert_file_check () - { -- $debug_cmd -- -- if test -z "$2" && test -n "$1"; then -+ $opt_debug -+ if test -z "$2" && test -n "$1" ; then - func_error "Could not determine host file name corresponding to" -- func_error " '$1'" -+ func_error " \`$1'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback: -- func_to_host_file_result=$1 -+ func_to_host_file_result="$1" - fi - } - # end func_convert_file_check -@@ -2888,11 +1606,10 @@ - # func_to_host_file_result to a simplistic fallback value (see below). - func_convert_path_check () - { -- $debug_cmd -- -+ $opt_debug - if test -z "$4" && test -n "$3"; then - func_error "Could not determine the host path corresponding to" -- func_error " '$3'" -+ func_error " \`$3'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback. This is a deliberately simplistic "conversion" and - # should not be "improved". See libtool.info. -@@ -2901,7 +1618,7 @@ - func_to_host_path_result=`echo "$3" | - $SED -e "$lt_replace_pathsep_chars"` - else -- func_to_host_path_result=$3 -+ func_to_host_path_result="$3" - fi - fi - } -@@ -2913,10 +1630,9 @@ - # and appending REPL if ORIG matches BACKPAT. 
- func_convert_path_front_back_pathsep () - { -- $debug_cmd -- -+ $opt_debug - case $4 in -- $1 ) func_to_host_path_result=$3$func_to_host_path_result -+ $1 ) func_to_host_path_result="$3$func_to_host_path_result" - ;; - esac - case $4 in -@@ -2930,7 +1646,7 @@ - ################################################## - # $build to $host FILE NAME CONVERSION FUNCTIONS # - ################################################## --# invoked via '$to_host_file_cmd ARG' -+# invoked via `$to_host_file_cmd ARG' - # - # In each case, ARG is the path to be converted from $build to $host format. - # Result will be available in $func_to_host_file_result. -@@ -2941,8 +1657,7 @@ - # in func_to_host_file_result. - func_to_host_file () - { -- $debug_cmd -- -+ $opt_debug - $to_host_file_cmd "$1" - } - # end func_to_host_file -@@ -2954,8 +1669,7 @@ - # in (the comma separated) LAZY, no conversion takes place. - func_to_tool_file () - { -- $debug_cmd -- -+ $opt_debug - case ,$2, in - *,"$to_tool_file_cmd",*) - func_to_tool_file_result=$1 -@@ -2973,7 +1687,7 @@ - # Copy ARG to func_to_host_file_result. - func_convert_file_noop () - { -- func_to_host_file_result=$1 -+ func_to_host_file_result="$1" - } - # end func_convert_file_noop - -@@ -2984,12 +1698,11 @@ - # func_to_host_file_result. - func_convert_file_msys_to_w32 () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - func_convert_core_msys_to_w32 "$1" -- func_to_host_file_result=$func_convert_core_msys_to_w32_result -+ func_to_host_file_result="$func_convert_core_msys_to_w32_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3001,9 +1714,8 @@ - # func_to_host_file_result. - func_convert_file_cygwin_to_w32 () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - # because $build is cygwin, we call "the" cygpath in $PATH; no need to use - # LT_CYGPATH in this case. -@@ -3019,12 +1731,11 @@ - # and a working winepath. Returns result in func_to_host_file_result. - func_convert_file_nix_to_w32 () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - func_convert_core_file_wine_to_w32 "$1" -- func_to_host_file_result=$func_convert_core_file_wine_to_w32_result -+ func_to_host_file_result="$func_convert_core_file_wine_to_w32_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3036,13 +1747,12 @@ - # Returns result in func_to_host_file_result. - func_convert_file_msys_to_cygwin () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - func_convert_core_msys_to_w32 "$1" - func_cygpath -u "$func_convert_core_msys_to_w32_result" -- func_to_host_file_result=$func_cygpath_result -+ func_to_host_file_result="$func_cygpath_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3055,14 +1765,13 @@ - # in func_to_host_file_result. - func_convert_file_nix_to_cygwin () - { -- $debug_cmd -- -- func_to_host_file_result=$1 -+ $opt_debug -+ func_to_host_file_result="$1" - if test -n "$1"; then - # convert from *nix to w32, then use cygpath to convert from w32 to cygwin. 
- func_convert_core_file_wine_to_w32 "$1" - func_cygpath -u "$func_convert_core_file_wine_to_w32_result" -- func_to_host_file_result=$func_cygpath_result -+ func_to_host_file_result="$func_cygpath_result" - fi - func_convert_file_check "$1" "$func_to_host_file_result" - } -@@ -3072,7 +1781,7 @@ - ############################################# - # $build to $host PATH CONVERSION FUNCTIONS # - ############################################# --# invoked via '$to_host_path_cmd ARG' -+# invoked via `$to_host_path_cmd ARG' - # - # In each case, ARG is the path to be converted from $build to $host format. - # The result will be available in $func_to_host_path_result. -@@ -3096,11 +1805,10 @@ - to_host_path_cmd= - func_init_to_host_path_cmd () - { -- $debug_cmd -- -+ $opt_debug - if test -z "$to_host_path_cmd"; then - func_stripname 'func_convert_file_' '' "$to_host_file_cmd" -- to_host_path_cmd=func_convert_path_$func_stripname_result -+ to_host_path_cmd="func_convert_path_${func_stripname_result}" - fi - } - -@@ -3110,8 +1818,7 @@ - # in func_to_host_path_result. - func_to_host_path () - { -- $debug_cmd -- -+ $opt_debug - func_init_to_host_path_cmd - $to_host_path_cmd "$1" - } -@@ -3122,7 +1829,7 @@ - # Copy ARG to func_to_host_path_result. - func_convert_path_noop () - { -- func_to_host_path_result=$1 -+ func_to_host_path_result="$1" - } - # end func_convert_path_noop - -@@ -3133,9 +1840,8 @@ - # func_to_host_path_result. - func_convert_path_msys_to_w32 () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # Remove leading and trailing path separator characters from ARG. MSYS - # behavior is inconsistent here; cygpath turns them into '.;' and ';.'; -@@ -3143,7 +1849,7 @@ - func_stripname : : "$1" - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" -- func_to_host_path_result=$func_convert_core_msys_to_w32_result -+ func_to_host_path_result="$func_convert_core_msys_to_w32_result" - func_convert_path_check : ";" \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" -@@ -3157,9 +1863,8 @@ - # func_to_host_file_result. - func_convert_path_cygwin_to_w32 () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # See func_convert_path_msys_to_w32: - func_stripname : : "$1" -@@ -3178,15 +1883,14 @@ - # a working winepath. Returns result in func_to_host_file_result. - func_convert_path_nix_to_w32 () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # See func_convert_path_msys_to_w32: - func_stripname : : "$1" - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" -- func_to_host_path_result=$func_convert_core_path_wine_to_w32_result -+ func_to_host_path_result="$func_convert_core_path_wine_to_w32_result" - func_convert_path_check : ";" \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" -@@ -3200,16 +1904,15 @@ - # Returns result in func_to_host_file_result. 
- func_convert_path_msys_to_cygwin () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # See func_convert_path_msys_to_w32: - func_stripname : : "$1" - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" - func_cygpath -u -p "$func_convert_core_msys_to_w32_result" -- func_to_host_path_result=$func_cygpath_result -+ func_to_host_path_result="$func_cygpath_result" - func_convert_path_check : : \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" : "$1" -@@ -3224,9 +1927,8 @@ - # func_to_host_file_result. - func_convert_path_nix_to_cygwin () - { -- $debug_cmd -- -- func_to_host_path_result=$1 -+ $opt_debug -+ func_to_host_path_result="$1" - if test -n "$1"; then - # Remove leading and trailing path separator characters from - # ARG. msys behavior is inconsistent here, cygpath turns them -@@ -3235,7 +1937,7 @@ - func_to_host_path_tmp1=$func_stripname_result - func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" - func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result" -- func_to_host_path_result=$func_cygpath_result -+ func_to_host_path_result="$func_cygpath_result" - func_convert_path_check : : \ - "$func_to_host_path_tmp1" "$func_to_host_path_result" - func_convert_path_front_back_pathsep ":*" "*:" : "$1" -@@ -3244,31 +1946,13 @@ - # end func_convert_path_nix_to_cygwin - - --# func_dll_def_p FILE --# True iff FILE is a Windows DLL '.def' file. --# Keep in sync with _LT_DLL_DEF_P in libtool.m4 --func_dll_def_p () --{ -- $debug_cmd -- -- func_dll_def_p_tmp=`$SED -n \ -- -e 's/^[ ]*//' \ -- -e '/^\(;.*\)*$/d' \ -- -e 's/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p' \ -- -e q \ -- "$1"` -- test DEF = "$func_dll_def_p_tmp" --} -- -- - # func_mode_compile arg... - func_mode_compile () - { -- $debug_cmd -- -+ $opt_debug - # Get the compilation command and the source file. - base_compile= -- srcfile=$nonopt # always keep a non-empty value in "srcfile" -+ srcfile="$nonopt" # always keep a non-empty value in "srcfile" - suppress_opt=yes - suppress_output= - arg_mode=normal -@@ -3281,12 +1965,12 @@ - case $arg_mode in - arg ) - # do not "continue". Instead, add this to base_compile -- lastarg=$arg -+ lastarg="$arg" - arg_mode=normal - ;; - - target ) -- libobj=$arg -+ libobj="$arg" - arg_mode=normal - continue - ;; -@@ -3296,7 +1980,7 @@ - case $arg in - -o) - test -n "$libobj" && \ -- func_fatal_error "you cannot specify '-o' more than once" -+ func_fatal_error "you cannot specify \`-o' more than once" - arg_mode=target - continue - ;; -@@ -3325,12 +2009,12 @@ - func_stripname '-Wc,' '' "$arg" - args=$func_stripname_result - lastarg= -- save_ifs=$IFS; IFS=, -+ save_ifs="$IFS"; IFS=',' - for arg in $args; do -- IFS=$save_ifs -+ IFS="$save_ifs" - func_append_quoted lastarg "$arg" - done -- IFS=$save_ifs -+ IFS="$save_ifs" - func_stripname ' ' '' "$lastarg" - lastarg=$func_stripname_result - -@@ -3343,8 +2027,8 @@ - # Accept the current argument as the source file. - # The previous "srcfile" becomes the current argument. - # -- lastarg=$srcfile -- srcfile=$arg -+ lastarg="$srcfile" -+ srcfile="$arg" - ;; - esac # case $arg - ;; -@@ -3359,13 +2043,13 @@ - func_fatal_error "you must specify an argument for -Xcompile" - ;; - target) -- func_fatal_error "you must specify a target with '-o'" -+ func_fatal_error "you must specify a target with \`-o'" - ;; - *) - # Get the name of the library object. 
- test -z "$libobj" && { - func_basename "$srcfile" -- libobj=$func_basename_result -+ libobj="$func_basename_result" - } - ;; - esac -@@ -3385,7 +2069,7 @@ - case $libobj in - *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;; - *) -- func_fatal_error "cannot determine name of library object from '$libobj'" -+ func_fatal_error "cannot determine name of library object from \`$libobj'" - ;; - esac - -@@ -3394,8 +2078,8 @@ - for arg in $later; do - case $arg in - -shared) -- test yes = "$build_libtool_libs" \ -- || func_fatal_configuration "cannot build a shared library" -+ test "$build_libtool_libs" != yes && \ -+ func_fatal_configuration "can not build a shared library" - build_old_libs=no - continue - ;; -@@ -3421,17 +2105,17 @@ - func_quote_for_eval "$libobj" - test "X$libobj" != "X$func_quote_for_eval_result" \ - && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \ -- && func_warning "libobj name '$libobj' may not contain shell special characters." -+ && func_warning "libobj name \`$libobj' may not contain shell special characters." - func_dirname_and_basename "$obj" "/" "" -- objname=$func_basename_result -- xdir=$func_dirname_result -- lobj=$xdir$objdir/$objname -+ objname="$func_basename_result" -+ xdir="$func_dirname_result" -+ lobj=${xdir}$objdir/$objname - - test -z "$base_compile" && \ - func_fatal_help "you must specify a compilation command" - - # Delete any leftover library objects. -- if test yes = "$build_old_libs"; then -+ if test "$build_old_libs" = yes; then - removelist="$obj $lobj $libobj ${libobj}T" - else - removelist="$lobj $libobj ${libobj}T" -@@ -3443,16 +2127,16 @@ - pic_mode=default - ;; - esac -- if test no = "$pic_mode" && test pass_all != "$deplibs_check_method"; then -+ if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then - # non-PIC code in shared libraries is not supported - pic_mode=default - fi - - # Calculate the filename of the output object if compiler does - # not support -o with -c -- if test no = "$compiler_c_o"; then -- output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.$objext -- lockfile=$output_obj.lock -+ if test "$compiler_c_o" = no; then -+ output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext} -+ lockfile="$output_obj.lock" - else - output_obj= - need_locks=no -@@ -3461,12 +2145,12 @@ - - # Lock this critical section if it is needed - # We use this script file to make the link, it avoids creating a new file -- if test yes = "$need_locks"; then -+ if test "$need_locks" = yes; then - until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do - func_echo "Waiting for $lockfile to be removed" - sleep 2 - done -- elif test warn = "$need_locks"; then -+ elif test "$need_locks" = warn; then - if test -f "$lockfile"; then - $ECHO "\ - *** ERROR, $lockfile exists and contains: -@@ -3474,7 +2158,7 @@ - - This indicates that another process is trying to use the same - temporary object file, and libtool could not work around it because --your compiler does not support '-c' and '-o' together. If you -+your compiler does not support \`-c' and \`-o' together. If you - repeat this compilation, it may succeed, by chance, but you had better - avoid parallel builds (make -j) in this platform, or get a better - compiler." -@@ -3496,11 +2180,11 @@ - qsrcfile=$func_quote_for_eval_result - - # Only build a PIC object if we are building libtool libraries. -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - # Without this assignment, base_compile gets emptied. 
- fbsd_hideous_sh_bug=$base_compile - -- if test no != "$pic_mode"; then -+ if test "$pic_mode" != no; then - command="$base_compile $qsrcfile $pic_flag" - else - # Don't build PIC code -@@ -3517,7 +2201,7 @@ - func_show_eval_locale "$command" \ - 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE' - -- if test warn = "$need_locks" && -+ if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $ECHO "\ - *** ERROR, $lockfile contains: -@@ -3528,7 +2212,7 @@ - - This indicates that another process is trying to use the same - temporary object file, and libtool could not work around it because --your compiler does not support '-c' and '-o' together. If you -+your compiler does not support \`-c' and \`-o' together. If you - repeat this compilation, it may succeed, by chance, but you had better - avoid parallel builds (make -j) in this platform, or get a better - compiler." -@@ -3544,20 +2228,20 @@ - fi - - # Allow error messages only from the first compilation. -- if test yes = "$suppress_opt"; then -+ if test "$suppress_opt" = yes; then - suppress_output=' >/dev/null 2>&1' - fi - fi - - # Only build a position-dependent object if we build old libraries. -- if test yes = "$build_old_libs"; then -- if test yes != "$pic_mode"; then -+ if test "$build_old_libs" = yes; then -+ if test "$pic_mode" != yes; then - # Don't build PIC code - command="$base_compile $qsrcfile$pie_flag" - else - command="$base_compile $qsrcfile $pic_flag" - fi -- if test yes = "$compiler_c_o"; then -+ if test "$compiler_c_o" = yes; then - func_append command " -o $obj" - fi - -@@ -3566,7 +2250,7 @@ - func_show_eval_locale "$command" \ - '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' - -- if test warn = "$need_locks" && -+ if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $ECHO "\ - *** ERROR, $lockfile contains: -@@ -3577,7 +2261,7 @@ - - This indicates that another process is trying to use the same - temporary object file, and libtool could not work around it because --your compiler does not support '-c' and '-o' together. If you -+your compiler does not support \`-c' and \`-o' together. If you - repeat this compilation, it may succeed, by chance, but you had better - avoid parallel builds (make -j) in this platform, or get a better - compiler." -@@ -3597,7 +2281,7 @@ - func_write_libtool_object "$libobj" "$objdir/$objname" "$objname" - - # Unlock the critical section if it was locked -- if test no != "$need_locks"; then -+ if test "$need_locks" != no; then - removelist=$lockfile - $RM "$lockfile" - fi -@@ -3607,7 +2291,7 @@ - } - - $opt_help || { -- test compile = "$opt_mode" && func_mode_compile ${1+"$@"} -+ test "$opt_mode" = compile && func_mode_compile ${1+"$@"} - } - - func_mode_help () -@@ -3627,7 +2311,7 @@ - Remove files from the build directory. - - RM is the name of the program to use to delete files associated with each FILE --(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed -+(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed - to RM. 
- - If FILE is a libtool library, object or program, all the files associated -@@ -3646,16 +2330,16 @@ - -no-suppress do not suppress compiler output for multiple passes - -prefer-pic try to build PIC objects only - -prefer-non-pic try to build non-PIC objects only -- -shared do not build a '.o' file suitable for static linking -- -static only build a '.o' file suitable for static linking -+ -shared do not build a \`.o' file suitable for static linking -+ -static only build a \`.o' file suitable for static linking - -Wc,FLAG pass FLAG directly to the compiler - --COMPILE-COMMAND is a command to be used in creating a 'standard' object file -+COMPILE-COMMAND is a command to be used in creating a \`standard' object file - from the given SOURCEFILE. - - The output file name is determined by removing the directory component from --SOURCEFILE, then substituting the C source code suffix '.c' with the --library object suffix, '.lo'." -+SOURCEFILE, then substituting the C source code suffix \`.c' with the -+library object suffix, \`.lo'." - ;; - - execute) -@@ -3668,7 +2352,7 @@ - - -dlopen FILE add the directory containing FILE to the library path - --This mode sets the library path environment variable according to '-dlopen' -+This mode sets the library path environment variable according to \`-dlopen' - flags. - - If any of the ARGS are libtool executable wrappers, then they are translated -@@ -3687,7 +2371,7 @@ - Each LIBDIR is a directory that contains libtool libraries. - - The commands that this mode executes may require superuser privileges. Use --the '--dry-run' option if you just want to see what would be executed." -+the \`--dry-run' option if you just want to see what would be executed." - ;; - - install) -@@ -3697,7 +2381,7 @@ - Install executables or libraries. - - INSTALL-COMMAND is the installation command. The first component should be --either the 'install' or 'cp' program. -+either the \`install' or \`cp' program. - - The following components of INSTALL-COMMAND are treated specially: - -@@ -3723,7 +2407,7 @@ - -avoid-version do not add a version suffix if possible - -bindir BINDIR specify path to binaries directory (for systems where - libraries must be found in the PATH setting at runtime) -- -dlopen FILE '-dlpreopen' FILE if it cannot be dlopened at runtime -+ -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime - -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols - -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) - -export-symbols SYMFILE -@@ -3737,8 +2421,7 @@ - -no-install link a not-installable executable - -no-undefined declare that a library does not refer to external symbols - -o OUTPUT-FILE create OUTPUT-FILE from the specified objects -- -objectlist FILE use a list of object files found in FILE to specify objects -- -os2dllname NAME force a short DLL name on OS/2 (no effect on other OSes) -+ -objectlist FILE Use a list of object files found in FILE to specify objects - -precious-files-regex REGEX - don't remove output files matching REGEX - -release RELEASE specify package release information -@@ -3758,20 +2441,20 @@ - -Xlinker FLAG pass linker-specific FLAG directly to the linker - -XCClinker FLAG pass link-specific FLAG to the compiler driver (CC) - --All other options (arguments beginning with '-') are ignored. -+All other options (arguments beginning with \`-') are ignored. - --Every other argument is treated as a filename. Files ending in '.la' are -+Every other argument is treated as a filename. 
Files ending in \`.la' are - treated as uninstalled libtool libraries, other files are standard or library - object files. - --If the OUTPUT-FILE ends in '.la', then a libtool library is created, --only library objects ('.lo' files) may be specified, and '-rpath' is -+If the OUTPUT-FILE ends in \`.la', then a libtool library is created, -+only library objects (\`.lo' files) may be specified, and \`-rpath' is - required, except when creating a convenience library. - --If OUTPUT-FILE ends in '.a' or '.lib', then a standard library is created --using 'ar' and 'ranlib', or on Windows using 'lib'. -+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created -+using \`ar' and \`ranlib', or on Windows using \`lib'. - --If OUTPUT-FILE ends in '.lo' or '.$objext', then a reloadable object file -+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file - is created, otherwise an executable program is created." - ;; - -@@ -3782,7 +2465,7 @@ - Remove libraries from an installation directory. - - RM is the name of the program to use to delete files associated with each FILE --(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed -+(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed - to RM. - - If FILE is a libtool library, all the files associated with it are deleted. -@@ -3790,17 +2473,17 @@ - ;; - - *) -- func_fatal_help "invalid operation mode '$opt_mode'" -+ func_fatal_help "invalid operation mode \`$opt_mode'" - ;; - esac - - echo -- $ECHO "Try '$progname --help' for more information about other modes." -+ $ECHO "Try \`$progname --help' for more information about other modes." - } - - # Now that we've collected a possible --mode arg, show help if necessary - if $opt_help; then -- if test : = "$opt_help"; then -+ if test "$opt_help" = :; then - func_mode_help - else - { -@@ -3808,7 +2491,7 @@ - for opt_mode in compile link execute install finish uninstall clean; do - func_mode_help - done -- } | $SED -n '1p; 2,$s/^Usage:/ or: /p' -+ } | sed -n '1p; 2,$s/^Usage:/ or: /p' - { - func_help noexit - for opt_mode in compile link execute install finish uninstall clean; do -@@ -3816,7 +2499,7 @@ - func_mode_help - done - } | -- $SED '1d -+ sed '1d - /^When reporting/,/^Report/{ - H - d -@@ -3833,17 +2516,16 @@ - # func_mode_execute arg... - func_mode_execute () - { -- $debug_cmd -- -+ $opt_debug - # The first argument is the command name. -- cmd=$nonopt -+ cmd="$nonopt" - test -z "$cmd" && \ - func_fatal_help "you must specify a COMMAND" - - # Handle -dlopen flags immediately. - for file in $opt_dlopen; do - test -f "$file" \ -- || func_fatal_help "'$file' is not a file" -+ || func_fatal_help "\`$file' is not a file" - - dir= - case $file in -@@ -3853,7 +2535,7 @@ - - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$file" \ -- || func_fatal_help "'$lib' is not a valid libtool archive" -+ || func_fatal_help "\`$lib' is not a valid libtool archive" - - # Read the libtool library. - dlname= -@@ -3864,18 +2546,18 @@ - if test -z "$dlname"; then - # Warn if it was a shared library. - test -n "$library_names" && \ -- func_warning "'$file' was not linked with '-export-dynamic'" -+ func_warning "\`$file' was not linked with \`-export-dynamic'" - continue - fi - - func_dirname "$file" "" "." -- dir=$func_dirname_result -+ dir="$func_dirname_result" - - if test -f "$dir/$objdir/$dlname"; then - func_append dir "/$objdir" - else - if test ! 
-f "$dir/$dlname"; then -- func_fatal_error "cannot find '$dlname' in '$dir' or '$dir/$objdir'" -+ func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" - fi - fi - ;; -@@ -3883,18 +2565,18 @@ - *.lo) - # Just add the directory containing the .lo file. - func_dirname "$file" "" "." -- dir=$func_dirname_result -+ dir="$func_dirname_result" - ;; - - *) -- func_warning "'-dlopen' is ignored for non-libtool libraries and objects" -+ func_warning "\`-dlopen' is ignored for non-libtool libraries and objects" - continue - ;; - esac - - # Get the absolute pathname. - absdir=`cd "$dir" && pwd` -- test -n "$absdir" && dir=$absdir -+ test -n "$absdir" && dir="$absdir" - - # Now add the directory to shlibpath_var. - if eval "test -z \"\$$shlibpath_var\""; then -@@ -3906,7 +2588,7 @@ - - # This variable tells wrapper scripts just to set shlibpath_var - # rather than running their programs. -- libtool_execute_magic=$magic -+ libtool_execute_magic="$magic" - - # Check if any of the arguments is a wrapper script. - args= -@@ -3919,12 +2601,12 @@ - if func_ltwrapper_script_p "$file"; then - func_source "$file" - # Transform arg to wrapped name. -- file=$progdir/$program -+ file="$progdir/$program" - elif func_ltwrapper_executable_p "$file"; then - func_ltwrapper_scriptname "$file" - func_source "$func_ltwrapper_scriptname_result" - # Transform arg to wrapped name. -- file=$progdir/$program -+ file="$progdir/$program" - fi - ;; - esac -@@ -3932,15 +2614,7 @@ - func_append_quoted args "$file" - done - -- if $opt_dry_run; then -- # Display what would be done. -- if test -n "$shlibpath_var"; then -- eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" -- echo "export $shlibpath_var" -- fi -- $ECHO "$cmd$args" -- exit $EXIT_SUCCESS -- else -+ if test "X$opt_dry_run" = Xfalse; then - if test -n "$shlibpath_var"; then - # Export the shlibpath_var. - eval "export $shlibpath_var" -@@ -3957,18 +2631,25 @@ - done - - # Now prepare to actually exec the command. -- exec_cmd=\$cmd$args -+ exec_cmd="\$cmd$args" -+ else -+ # Display what would be done. -+ if test -n "$shlibpath_var"; then -+ eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" -+ echo "export $shlibpath_var" -+ fi -+ $ECHO "$cmd$args" -+ exit $EXIT_SUCCESS - fi - } - --test execute = "$opt_mode" && func_mode_execute ${1+"$@"} -+test "$opt_mode" = execute && func_mode_execute ${1+"$@"} - - - # func_mode_finish arg... - func_mode_finish () - { -- $debug_cmd -- -+ $opt_debug - libs= - libdirs= - admincmds= -@@ -3982,11 +2663,11 @@ - if func_lalib_unsafe_p "$opt"; then - func_append libs " $opt" - else -- func_warning "'$opt' is not a valid libtool archive" -+ func_warning "\`$opt' is not a valid libtool archive" - fi - - else -- func_fatal_error "invalid argument '$opt'" -+ func_fatal_error "invalid argument \`$opt'" - fi - done - -@@ -4001,12 +2682,12 @@ - # Remove sysroot references - if $opt_dry_run; then - for lib in $libs; do -- echo "removing references to $lt_sysroot and '=' prefixes from $lib" -+ echo "removing references to $lt_sysroot and \`=' prefixes from $lib" - done - else - tmpdir=`func_mktempdir` - for lib in $libs; do -- $SED -e "$sysroot_cmd s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ -+ sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ - > $tmpdir/tmp-la - mv -f $tmpdir/tmp-la $lib - done -@@ -4031,7 +2712,7 @@ - fi - - # Exit here if they wanted silent mode. 
-- $opt_quiet && exit $EXIT_SUCCESS -+ $opt_silent && exit $EXIT_SUCCESS - - if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then - echo "----------------------------------------------------------------------" -@@ -4042,27 +2723,27 @@ - echo - echo "If you ever happen to want to link against installed libraries" - echo "in a given directory, LIBDIR, you must either use libtool, and" -- echo "specify the full pathname of the library, or use the '-LLIBDIR'" -+ echo "specify the full pathname of the library, or use the \`-LLIBDIR'" - echo "flag during linking and do at least one of the following:" - if test -n "$shlibpath_var"; then -- echo " - add LIBDIR to the '$shlibpath_var' environment variable" -+ echo " - add LIBDIR to the \`$shlibpath_var' environment variable" - echo " during execution" - fi - if test -n "$runpath_var"; then -- echo " - add LIBDIR to the '$runpath_var' environment variable" -+ echo " - add LIBDIR to the \`$runpath_var' environment variable" - echo " during linking" - fi - if test -n "$hardcode_libdir_flag_spec"; then - libdir=LIBDIR - eval flag=\"$hardcode_libdir_flag_spec\" - -- $ECHO " - use the '$flag' linker flag" -+ $ECHO " - use the \`$flag' linker flag" - fi - if test -n "$admincmds"; then - $ECHO " - have your system administrator run these commands:$admincmds" - fi - if test -f /etc/ld.so.conf; then -- echo " - have your system administrator add LIBDIR to '/etc/ld.so.conf'" -+ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" - fi - echo - -@@ -4081,20 +2762,18 @@ - exit $EXIT_SUCCESS - } - --test finish = "$opt_mode" && func_mode_finish ${1+"$@"} -+test "$opt_mode" = finish && func_mode_finish ${1+"$@"} - - - # func_mode_install arg... - func_mode_install () - { -- $debug_cmd -- -+ $opt_debug - # There may be an optional sh(1) argument at the beginning of - # install_prog (especially on Windows NT). -- if test "$SHELL" = "$nonopt" || test /bin/sh = "$nonopt" || -+ if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || - # Allow the use of GNU shtool's install command. -- case $nonopt in *shtool*) :;; *) false;; esac -- then -+ case $nonopt in *shtool*) :;; *) false;; esac; then - # Aesthetically quote it. - func_quote_for_eval "$nonopt" - install_prog="$func_quote_for_eval_result " -@@ -4121,7 +2800,7 @@ - opts= - prev= - install_type= -- isdir=false -+ isdir=no - stripme= - no_mode=: - for arg -@@ -4134,7 +2813,7 @@ - fi - - case $arg in -- -d) isdir=: ;; -+ -d) isdir=yes ;; - -f) - if $install_cp; then :; else - prev=$arg -@@ -4152,7 +2831,7 @@ - *) - # If the previous option needed an argument, then skip it. - if test -n "$prev"; then -- if test X-m = "X$prev" && test -n "$install_override_mode"; then -+ if test "x$prev" = x-m && test -n "$install_override_mode"; then - arg2=$install_override_mode - no_mode=false - fi -@@ -4177,7 +2856,7 @@ - func_fatal_help "you must specify an install program" - - test -n "$prev" && \ -- func_fatal_help "the '$prev' option requires an argument" -+ func_fatal_help "the \`$prev' option requires an argument" - - if test -n "$install_override_mode" && $no_mode; then - if $install_cp; then :; else -@@ -4199,19 +2878,19 @@ - dest=$func_stripname_result - - # Check to see that the destination is a directory. -- test -d "$dest" && isdir=: -- if $isdir; then -- destdir=$dest -+ test -d "$dest" && isdir=yes -+ if test "$isdir" = yes; then -+ destdir="$dest" - destname= - else - func_dirname_and_basename "$dest" "" "." 
-- destdir=$func_dirname_result -- destname=$func_basename_result -+ destdir="$func_dirname_result" -+ destname="$func_basename_result" - - # Not a directory, so check to see that there is only one file specified. - set dummy $files; shift - test "$#" -gt 1 && \ -- func_fatal_help "'$dest' is not a directory" -+ func_fatal_help "\`$dest' is not a directory" - fi - case $destdir in - [\\/]* | [A-Za-z]:[\\/]*) ;; -@@ -4220,7 +2899,7 @@ - case $file in - *.lo) ;; - *) -- func_fatal_help "'$destdir' must be an absolute directory name" -+ func_fatal_help "\`$destdir' must be an absolute directory name" - ;; - esac - done -@@ -4229,7 +2908,7 @@ - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. -- libtool_install_magic=$magic -+ libtool_install_magic="$magic" - - staticlibs= - future_libdirs= -@@ -4249,7 +2928,7 @@ - - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$file" \ -- || func_fatal_help "'$file' is not a valid libtool archive" -+ || func_fatal_help "\`$file' is not a valid libtool archive" - - library_names= - old_library= -@@ -4271,7 +2950,7 @@ - fi - - func_dirname "$file" "/" "" -- dir=$func_dirname_result -+ dir="$func_dirname_result" - func_append dir "$objdir" - - if test -n "$relink_command"; then -@@ -4285,7 +2964,7 @@ - # are installed into $libdir/../bin (currently, that works fine) - # but it's something to keep an eye on. - test "$inst_prefix_dir" = "$destdir" && \ -- func_fatal_error "error: cannot install '$file' to a directory not ending in $libdir" -+ func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir" - - if test -n "$inst_prefix_dir"; then - # Stick the inst_prefix_dir data into the link command. -@@ -4294,36 +2973,29 @@ - relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"` - fi - -- func_warning "relinking '$file'" -+ func_warning "relinking \`$file'" - func_show_eval "$relink_command" \ -- 'func_fatal_error "error: relink '\''$file'\'' with the above command before installing it"' -+ 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"' - fi - - # See the names of the shared library. - set dummy $library_names; shift - if test -n "$1"; then -- realname=$1 -+ realname="$1" - shift - -- srcname=$realname -- test -n "$relink_command" && srcname=${realname}T -+ srcname="$realname" -+ test -n "$relink_command" && srcname="$realname"T - - # Install the shared library and build the symlinks. - func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \ - 'exit $?' -- tstripme=$stripme -+ tstripme="$stripme" - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - case $realname in - *.dll.a) -- tstripme= -- ;; -- esac -- ;; -- os2*) -- case $realname in -- *_dll.a) -- tstripme= -+ tstripme="" - ;; - esac - ;; -@@ -4334,7 +3006,7 @@ - - if test "$#" -gt 0; then - # Delete the old symlinks, and create new ones. -- # Try 'ln -sf' first, because the 'ln' binary might depend on -+ # Try `ln -sf' first, because the `ln' binary might depend on - # the symlink we replace! Solaris /bin/ln does not understand -f, - # so we also need to try rm && ln -s. - for linkname -@@ -4345,14 +3017,14 @@ - fi - - # Do each command in the postinstall commands. -- lib=$destdir/$realname -+ lib="$destdir/$realname" - func_execute_cmds "$postinstall_cmds" 'exit $?' - fi - - # Install the pseudo-library for information purposes. 
- func_basename "$file" -- name=$func_basename_result -- instname=$dir/${name}i -+ name="$func_basename_result" -+ instname="$dir/$name"i - func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' - - # Maybe install the static library, too. -@@ -4364,11 +3036,11 @@ - - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then -- destfile=$destdir/$destname -+ destfile="$destdir/$destname" - else - func_basename "$file" -- destfile=$func_basename_result -- destfile=$destdir/$destfile -+ destfile="$func_basename_result" -+ destfile="$destdir/$destfile" - fi - - # Deduce the name of the destination old-style object file. -@@ -4378,11 +3050,11 @@ - staticdest=$func_lo2o_result - ;; - *.$objext) -- staticdest=$destfile -+ staticdest="$destfile" - destfile= - ;; - *) -- func_fatal_help "cannot copy a libtool object to '$destfile'" -+ func_fatal_help "cannot copy a libtool object to \`$destfile'" - ;; - esac - -@@ -4391,7 +3063,7 @@ - func_show_eval "$install_prog $file $destfile" 'exit $?' - - # Install the old object if enabled. -- if test yes = "$build_old_libs"; then -+ if test "$build_old_libs" = yes; then - # Deduce the name of the old-style object file. - func_lo2o "$file" - staticobj=$func_lo2o_result -@@ -4403,23 +3075,23 @@ - *) - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then -- destfile=$destdir/$destname -+ destfile="$destdir/$destname" - else - func_basename "$file" -- destfile=$func_basename_result -- destfile=$destdir/$destfile -+ destfile="$func_basename_result" -+ destfile="$destdir/$destfile" - fi - - # If the file is missing, and there is a .exe on the end, strip it - # because it is most likely a libtool script we actually want to - # install -- stripped_ext= -+ stripped_ext="" - case $file in - *.exe) - if test ! -f "$file"; then - func_stripname '' '.exe' "$file" - file=$func_stripname_result -- stripped_ext=.exe -+ stripped_ext=".exe" - fi - ;; - esac -@@ -4447,19 +3119,19 @@ - - # Check the variables that should have been set. - test -z "$generated_by_libtool_version" && \ -- func_fatal_error "invalid libtool wrapper script '$wrapper'" -+ func_fatal_error "invalid libtool wrapper script \`$wrapper'" - -- finalize=: -+ finalize=yes - for lib in $notinst_deplibs; do - # Check to see that each library is installed. - libdir= - if test -f "$lib"; then - func_source "$lib" - fi -- libfile=$libdir/`$ECHO "$lib" | $SED 's%^.*/%%g'` -+ libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test - if test -n "$libdir" && test ! -f "$libfile"; then -- func_warning "'$lib' has not been installed in '$libdir'" -- finalize=false -+ func_warning "\`$lib' has not been installed in \`$libdir'" -+ finalize=no - fi - done - -@@ -4467,29 +3139,29 @@ - func_source "$wrapper" - - outputname= -- if test no = "$fast_install" && test -n "$relink_command"; then -+ if test "$fast_install" = no && test -n "$relink_command"; then - $opt_dry_run || { -- if $finalize; then -+ if test "$finalize" = yes; then - tmpdir=`func_mktempdir` - func_basename "$file$stripped_ext" -- file=$func_basename_result -- outputname=$tmpdir/$file -+ file="$func_basename_result" -+ outputname="$tmpdir/$file" - # Replace the output file specification. 
- relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'` - -- $opt_quiet || { -+ $opt_silent || { - func_quote_for_expand "$relink_command" - eval "func_echo $func_quote_for_expand_result" - } - if eval "$relink_command"; then : - else -- func_error "error: relink '$file' with the above command before installing it" -+ func_error "error: relink \`$file' with the above command before installing it" - $opt_dry_run || ${RM}r "$tmpdir" - continue - fi -- file=$outputname -+ file="$outputname" - else -- func_warning "cannot relink '$file'" -+ func_warning "cannot relink \`$file'" - fi - } - else -@@ -4526,10 +3198,10 @@ - - for file in $staticlibs; do - func_basename "$file" -- name=$func_basename_result -+ name="$func_basename_result" - - # Set up the ranlib parameters. -- oldlib=$destdir/$name -+ oldlib="$destdir/$name" - func_to_tool_file "$oldlib" func_convert_file_msys_to_w32 - tool_oldlib=$func_to_tool_file_result - -@@ -4544,18 +3216,18 @@ - done - - test -n "$future_libdirs" && \ -- func_warning "remember to run '$progname --finish$future_libdirs'" -+ func_warning "remember to run \`$progname --finish$future_libdirs'" - - if test -n "$current_libdirs"; then - # Maybe just do a dry run. - $opt_dry_run && current_libdirs=" -n$current_libdirs" -- exec_cmd='$SHELL "$progpath" $preserve_args --finish$current_libdirs' -+ exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' - else - exit $EXIT_SUCCESS - fi - } - --test install = "$opt_mode" && func_mode_install ${1+"$@"} -+test "$opt_mode" = install && func_mode_install ${1+"$@"} - - - # func_generate_dlsyms outputname originator pic_p -@@ -4563,17 +3235,16 @@ - # a dlpreopen symbol table. - func_generate_dlsyms () - { -- $debug_cmd -- -- my_outputname=$1 -- my_originator=$2 -- my_pic_p=${3-false} -- my_prefix=`$ECHO "$my_originator" | $SED 's%[^a-zA-Z0-9]%_%g'` -+ $opt_debug -+ my_outputname="$1" -+ my_originator="$2" -+ my_pic_p="${3-no}" -+ my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'` - my_dlsyms= - -- if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then -+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - if test -n "$NM" && test -n "$global_symbol_pipe"; then -- my_dlsyms=${my_outputname}S.c -+ my_dlsyms="${my_outputname}S.c" - else - func_error "not configured to extract global symbols from dlpreopened files" - fi -@@ -4584,7 +3255,7 @@ - "") ;; - *.c) - # Discover the nlist of each of the dlfiles. -- nlist=$output_objdir/$my_outputname.nm -+ nlist="$output_objdir/${my_outputname}.nm" - - func_show_eval "$RM $nlist ${nlist}S ${nlist}T" - -@@ -4592,36 +3263,34 @@ - func_verbose "creating $output_objdir/$my_dlsyms" - - $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\ --/* $my_dlsyms - symbol resolution table for '$my_outputname' dlsym emulation. */ --/* Generated by $PROGRAM (GNU $PACKAGE) $VERSION */ -+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */ -+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */ - - #ifdef __cplusplus - extern \"C\" { - #endif - --#if defined __GNUC__ && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4)) -+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4)) - #pragma GCC diagnostic ignored \"-Wstrict-prototypes\" - #endif - - /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ --#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE --/* DATA imports from DLLs on WIN32 can't be const, because runtime -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. */ - # define LT_DLSYM_CONST --#elif defined __osf__ -+#elif defined(__osf__) - /* This system does not cope well with relocations in const data. */ - # define LT_DLSYM_CONST - #else - # define LT_DLSYM_CONST const - #endif - --#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0) -- - /* External symbol declarations for the compiler. */\ - " - -- if test yes = "$dlself"; then -- func_verbose "generating symbol list for '$output'" -+ if test "$dlself" = yes; then -+ func_verbose "generating symbol list for \`$output'" - - $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist" - -@@ -4629,7 +3298,7 @@ - progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` - for progfile in $progfiles; do - func_to_tool_file "$progfile" func_convert_file_msys_to_w32 -- func_verbose "extracting global C symbols from '$func_to_tool_file_result'" -+ func_verbose "extracting global C symbols from \`$func_to_tool_file_result'" - $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'" - done - -@@ -4649,10 +3318,10 @@ - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then -- export_symbols=$output_objdir/$outputname.exp -+ export_symbols="$output_objdir/$outputname.exp" - $opt_dry_run || { - $RM $export_symbols -- eval "$SED -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' -+ eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' - case $host in - *cygwin* | *mingw* | *cegcc* ) - eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' -@@ -4662,7 +3331,7 @@ - } - else - $opt_dry_run || { -- eval "$SED -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' -+ eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' - eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' - eval '$MV "$nlist"T "$nlist"' - case $host in -@@ -4676,22 +3345,22 @@ - fi - - for dlprefile in $dlprefiles; do -- func_verbose "extracting global C symbols from '$dlprefile'" -+ func_verbose "extracting global C symbols from \`$dlprefile'" - func_basename "$dlprefile" -- name=$func_basename_result -+ name="$func_basename_result" - case $host in - *cygwin* | *mingw* | *cegcc* ) - # if an import library, we need to obtain dlname - if func_win32_import_lib_p "$dlprefile"; then - func_tr_sh "$dlprefile" - eval "curr_lafile=\$libfile_$func_tr_sh_result" -- dlprefile_dlbasename= -+ dlprefile_dlbasename="" - if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then - # Use subshell, to avoid clobbering current variable values - dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"` -- if test -n "$dlprefile_dlname"; then -+ if test -n "$dlprefile_dlname" ; then - func_basename "$dlprefile_dlname" -- dlprefile_dlbasename=$func_basename_result -+ dlprefile_dlbasename="$func_basename_result" - else - # no lafile. user explicitly requested -dlpreopen <import library>. 
- $sharedlib_from_linklib_cmd "$dlprefile" -@@ -4699,7 +3368,7 @@ - fi - fi - $opt_dry_run || { -- if test -n "$dlprefile_dlbasename"; then -+ if test -n "$dlprefile_dlbasename" ; then - eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"' - else - func_warning "Could not compute DLL name from $name" -@@ -4755,11 +3424,6 @@ - echo '/* NONE */' >> "$output_objdir/$my_dlsyms" - fi - -- func_show_eval '$RM "${nlist}I"' -- if test -n "$global_symbol_to_import"; then -- eval "$global_symbol_to_import"' < "$nlist"S > "$nlist"I' -- fi -- - echo >> "$output_objdir/$my_dlsyms" "\ - - /* The mapping between symbol names and symbols. */ -@@ -4768,30 +3432,11 @@ - void *address; - } lt_dlsymlist; - extern LT_DLSYM_CONST lt_dlsymlist --lt_${my_prefix}_LTX_preloaded_symbols[];\ --" -- -- if test -s "$nlist"I; then -- echo >> "$output_objdir/$my_dlsyms" "\ --static void lt_syminit(void) --{ -- LT_DLSYM_CONST lt_dlsymlist *symbol = lt_${my_prefix}_LTX_preloaded_symbols; -- for (; symbol->name; ++symbol) -- {" -- $SED 's/.*/ if (STREQ (symbol->name, \"&\")) symbol->address = (void *) \&&;/' < "$nlist"I >> "$output_objdir/$my_dlsyms" -- echo >> "$output_objdir/$my_dlsyms" "\ -- } --}" -- fi -- echo >> "$output_objdir/$my_dlsyms" "\ -+lt_${my_prefix}_LTX_preloaded_symbols[]; - LT_DLSYM_CONST lt_dlsymlist - lt_${my_prefix}_LTX_preloaded_symbols[] = --{ {\"$my_originator\", (void *) 0}," -- -- if test -s "$nlist"I; then -- echo >> "$output_objdir/$my_dlsyms" "\ -- {\"@INIT@\", (void *) <_syminit}," -- fi -+{\ -+ { \"$my_originator\", (void *) 0 }," - - case $need_lib_prefix in - no) -@@ -4833,7 +3478,9 @@ - *-*-hpux*) - pic_flag_for_symtable=" $pic_flag" ;; - *) -- $my_pic_p && pic_flag_for_symtable=" $pic_flag" -+ if test "X$my_pic_p" != Xno; then -+ pic_flag_for_symtable=" $pic_flag" -+ fi - ;; - esac - ;; -@@ -4850,10 +3497,10 @@ - func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?' - - # Clean up the generated files. -- func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T" "${nlist}I"' -+ func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"' - - # Transform the symbol file into the correct name. -- symfileobj=$output_objdir/${my_outputname}S.$objext -+ symfileobj="$output_objdir/${my_outputname}S.$objext" - case $host in - *cygwin* | *mingw* | *cegcc* ) - if test -f "$output_objdir/$my_outputname.def"; then -@@ -4871,7 +3518,7 @@ - esac - ;; - *) -- func_fatal_error "unknown suffix for '$my_dlsyms'" -+ func_fatal_error "unknown suffix for \`$my_dlsyms'" - ;; - esac - else -@@ -4885,32 +3532,6 @@ - fi - } - --# func_cygming_gnu_implib_p ARG --# This predicate returns with zero status (TRUE) if --# ARG is a GNU/binutils-style import library. Returns --# with nonzero status (FALSE) otherwise. --func_cygming_gnu_implib_p () --{ -- $debug_cmd -- -- func_to_tool_file "$1" func_convert_file_msys_to_w32 -- func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` -- test -n "$func_cygming_gnu_implib_tmp" --} -- --# func_cygming_ms_implib_p ARG --# This predicate returns with zero status (TRUE) if --# ARG is an MS-style import library. Returns --# with nonzero status (FALSE) otherwise. 
--func_cygming_ms_implib_p () --{ -- $debug_cmd -- -- func_to_tool_file "$1" func_convert_file_msys_to_w32 -- func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` -- test -n "$func_cygming_ms_implib_tmp" --} -- - # func_win32_libid arg - # return the library type of file 'arg' - # -@@ -4920,9 +3541,8 @@ - # Despite the name, also deal with 64 bit binaries. - func_win32_libid () - { -- $debug_cmd -- -- win32_libid_type=unknown -+ $opt_debug -+ win32_libid_type="unknown" - win32_fileres=`file -L $1 2>/dev/null` - case $win32_fileres in - *ar\ archive\ import\ library*) # definitely import -@@ -4932,29 +3552,16 @@ - # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD. - if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | - $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then -- case $nm_interface in -- "MS dumpbin") -- if func_cygming_ms_implib_p "$1" || -- func_cygming_gnu_implib_p "$1" -- then -- win32_nmres=import -- else -- win32_nmres= -- fi -- ;; -- *) -- func_to_tool_file "$1" func_convert_file_msys_to_w32 -- win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | -- $SED -n -e ' -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | -+ $SED -n -e ' - 1,100{ - / I /{ -- s|.*|import| -+ s,.*,import, - p - q - } - }'` -- ;; -- esac - case $win32_nmres in - import*) win32_libid_type="x86 archive import";; - *) win32_libid_type="x86 archive static";; -@@ -4986,8 +3593,7 @@ - # $sharedlib_from_linklib_result - func_cygming_dll_for_implib () - { -- $debug_cmd -- -+ $opt_debug - sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"` - } - -@@ -5004,8 +3610,7 @@ - # specified import library. - func_cygming_dll_for_implib_fallback_core () - { -- $debug_cmd -- -+ $opt_debug - match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"` - $OBJDUMP -s --section "$1" "$2" 2>/dev/null | - $SED '/^Contents of section '"$match_literal"':/{ -@@ -5041,8 +3646,8 @@ - /./p' | - # we now have a list, one entry per line, of the stringified - # contents of the appropriate section of all members of the -- # archive that possess that section. Heuristic: eliminate -- # all those that have a first or second character that is -+ # archive which possess that section. Heuristic: eliminate -+ # all those which have a first or second character that is - # a '.' (that is, objdump's representation of an unprintable - # character.) This should work for all archives with less than - # 0x302f exports -- but will fail for DLLs whose name actually -@@ -5053,6 +3658,30 @@ - $SED -e '/^\./d;/^.\./d;q' - } - -+# func_cygming_gnu_implib_p ARG -+# This predicate returns with zero status (TRUE) if -+# ARG is a GNU/binutils-style import library. Returns -+# with nonzero status (FALSE) otherwise. -+func_cygming_gnu_implib_p () -+{ -+ $opt_debug -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` -+ test -n "$func_cygming_gnu_implib_tmp" -+} -+ -+# func_cygming_ms_implib_p ARG -+# This predicate returns with zero status (TRUE) if -+# ARG is an MS-style import library. Returns -+# with nonzero status (FALSE) otherwise. 
-+func_cygming_ms_implib_p () -+{ -+ $opt_debug -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` -+ test -n "$func_cygming_ms_implib_tmp" -+} -+ - # func_cygming_dll_for_implib_fallback ARG - # Platform-specific function to extract the - # name of the DLL associated with the specified -@@ -5066,17 +3695,16 @@ - # $sharedlib_from_linklib_result - func_cygming_dll_for_implib_fallback () - { -- $debug_cmd -- -- if func_cygming_gnu_implib_p "$1"; then -+ $opt_debug -+ if func_cygming_gnu_implib_p "$1" ; then - # binutils import library - sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"` -- elif func_cygming_ms_implib_p "$1"; then -+ elif func_cygming_ms_implib_p "$1" ; then - # ms-generated import library - sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"` - else - # unknown -- sharedlib_from_linklib_result= -+ sharedlib_from_linklib_result="" - fi - } - -@@ -5084,11 +3712,10 @@ - # func_extract_an_archive dir oldlib - func_extract_an_archive () - { -- $debug_cmd -- -- f_ex_an_ar_dir=$1; shift -- f_ex_an_ar_oldlib=$1 -- if test yes = "$lock_old_archive_extraction"; then -+ $opt_debug -+ f_ex_an_ar_dir="$1"; shift -+ f_ex_an_ar_oldlib="$1" -+ if test "$lock_old_archive_extraction" = yes; then - lockfile=$f_ex_an_ar_oldlib.lock - until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do - func_echo "Waiting for $lockfile to be removed" -@@ -5097,7 +3724,7 @@ - fi - func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \ - 'stat=$?; rm -f "$lockfile"; exit $stat' -- if test yes = "$lock_old_archive_extraction"; then -+ if test "$lock_old_archive_extraction" = yes; then - $opt_dry_run || rm -f "$lockfile" - fi - if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then -@@ -5111,23 +3738,22 @@ - # func_extract_archives gentop oldlib ... - func_extract_archives () - { -- $debug_cmd -- -- my_gentop=$1; shift -+ $opt_debug -+ my_gentop="$1"; shift - my_oldlibs=${1+"$@"} -- my_oldobjs= -- my_xlib= -- my_xabs= -- my_xdir= -+ my_oldobjs="" -+ my_xlib="" -+ my_xabs="" -+ my_xdir="" - - for my_xlib in $my_oldlibs; do - # Extract the objects. - case $my_xlib in -- [\\/]* | [A-Za-z]:[\\/]*) my_xabs=$my_xlib ;; -+ [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; - *) my_xabs=`pwd`"/$my_xlib" ;; - esac - func_basename "$my_xlib" -- my_xlib=$func_basename_result -+ my_xlib="$func_basename_result" - my_xlib_u=$my_xlib - while :; do - case " $extracted_archives " in -@@ -5139,7 +3765,7 @@ - esac - done - extracted_archives="$extracted_archives $my_xlib_u" -- my_xdir=$my_gentop/$my_xlib_u -+ my_xdir="$my_gentop/$my_xlib_u" - - func_mkdir_p "$my_xdir" - -@@ -5152,23 +3778,22 @@ - cd $my_xdir || exit $? 
- darwin_archive=$my_xabs - darwin_curdir=`pwd` -- func_basename "$darwin_archive" -- darwin_base_archive=$func_basename_result -+ darwin_base_archive=`basename "$darwin_archive"` - darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true` - if test -n "$darwin_arches"; then - darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'` - darwin_arch= - func_verbose "$darwin_base_archive has multiple architectures $darwin_arches" -- for darwin_arch in $darwin_arches; do -- func_mkdir_p "unfat-$$/$darwin_base_archive-$darwin_arch" -- $LIPO -thin $darwin_arch -output "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" "$darwin_archive" -- cd "unfat-$$/$darwin_base_archive-$darwin_arch" -- func_extract_an_archive "`pwd`" "$darwin_base_archive" -+ for darwin_arch in $darwin_arches ; do -+ func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}" -+ $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" -+ cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" -+ func_extract_an_archive "`pwd`" "${darwin_base_archive}" - cd "$darwin_curdir" -- $RM "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" -+ $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" - done # $darwin_arches - ## Okay now we've a bunch of thin objects, gotta fatten them up :) -- darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$sed_basename" | sort -u` -+ darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u` - darwin_file= - darwin_files= - for darwin_file in $darwin_filelist; do -@@ -5190,7 +3815,7 @@ - my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP` - done - -- func_extract_archives_result=$my_oldobjs -+ func_extract_archives_result="$my_oldobjs" - } - - -@@ -5205,7 +3830,7 @@ - # - # ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR - # variable will take. If 'yes', then the emitted script --# will assume that the directory where it is stored is -+# will assume that the directory in which it is stored is - # the $objdir directory. This is a cygwin/mingw-specific - # behavior. - func_emit_wrapper () -@@ -5216,7 +3841,7 @@ - #! $SHELL - - # $output - temporary wrapper script for $objdir/$outputname --# Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - # - # The $output program cannot be directly executed until all the libtool - # libraries that it depends on are installed. -@@ -5273,9 +3898,9 @@ - - # Very basic option parsing. These options are (a) specific to - # the libtool wrapper, (b) are identical between the wrapper --# /script/ and the wrapper /executable/ that is used only on -+# /script/ and the wrapper /executable/ which is used only on - # windows platforms, and (c) all begin with the string "--lt-" --# (application programs are unlikely to have options that match -+# (application programs are unlikely to have options which match - # this pattern). 
- # - # There are only two supported options: --lt-debug and -@@ -5308,7 +3933,7 @@ - - # Print the debug banner immediately: - if test -n \"\$lt_option_debug\"; then -- echo \"$outputname:$output:\$LINENO: libtool wrapper (GNU $PACKAGE) $VERSION\" 1>&2 -+ echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2 - fi - } - -@@ -5319,7 +3944,7 @@ - lt_dump_args_N=1; - for lt_arg - do -- \$ECHO \"$outputname:$output:\$LINENO: newargv[\$lt_dump_args_N]: \$lt_arg\" -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\" - lt_dump_args_N=\`expr \$lt_dump_args_N + 1\` - done - } -@@ -5333,7 +3958,7 @@ - *-*-mingw | *-*-os2* | *-cegcc*) - $ECHO "\ - if test -n \"\$lt_option_debug\"; then -- \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir\\\\\$program\" 1>&2 -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2 - func_lt_dump_args \${1+\"\$@\"} 1>&2 - fi - exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} -@@ -5343,7 +3968,7 @@ - *) - $ECHO "\ - if test -n \"\$lt_option_debug\"; then -- \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir/\$program\" 1>&2 -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2 - func_lt_dump_args \${1+\"\$@\"} 1>&2 - fi - exec \"\$progdir/\$program\" \${1+\"\$@\"} -@@ -5418,13 +4043,13 @@ - test -n \"\$absdir\" && thisdir=\"\$absdir\" - " - -- if test yes = "$fast_install"; then -+ if test "$fast_install" = yes; then - $ECHO "\ - program=lt-'$outputname'$exeext - progdir=\"\$thisdir/$objdir\" - - if test ! -f \"\$progdir/\$program\" || -- { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | $SED 1q\`; \\ -+ { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ - test \"X\$file\" != \"X\$progdir/\$program\"; }; then - - file=\"\$\$-\$program\" -@@ -5441,7 +4066,7 @@ - if test -n \"\$relink_command\"; then - if relink_command_output=\`eval \$relink_command 2>&1\`; then : - else -- \$ECHO \"\$relink_command_output\" >&2 -+ $ECHO \"\$relink_command_output\" >&2 - $RM \"\$progdir/\$file\" - exit 1 - fi -@@ -5476,7 +4101,7 @@ - fi - - # Export our shlibpath_var if we have one. -- if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then -+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - $ECHO "\ - # Add our own library path to $shlibpath_var - $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" -@@ -5496,7 +4121,7 @@ - fi - else - # The program doesn't exist. -- \$ECHO \"\$0: error: '\$progdir/\$program' does not exist\" 1>&2 -+ \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2 - \$ECHO \"This script is just a wrapper for \$program.\" 1>&2 - \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2 - exit 1 -@@ -5515,7 +4140,7 @@ - cat <<EOF - - /* $cwrappersource - temporary wrapper executable for $objdir/$outputname -- Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+ Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - - The $output program cannot be directly executed until all the libtool - libraries that it depends on are installed. 
-@@ -5550,45 +4175,47 @@ - #include <fcntl.h> - #include <sys/stat.h> - --#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0) -- - /* declarations of non-ANSI functions */ --#if defined __MINGW32__ -+#if defined(__MINGW32__) - # ifdef __STRICT_ANSI__ - int _putenv (const char *); - # endif --#elif defined __CYGWIN__ -+#elif defined(__CYGWIN__) - # ifdef __STRICT_ANSI__ - char *realpath (const char *, char *); - int putenv (char *); - int setenv (const char *, const char *, int); - # endif --/* #elif defined other_platform || defined ... */ -+/* #elif defined (other platforms) ... */ - #endif - - /* portability defines, excluding path handling macros */ --#if defined _MSC_VER -+#if defined(_MSC_VER) - # define setmode _setmode - # define stat _stat - # define chmod _chmod - # define getcwd _getcwd - # define putenv _putenv - # define S_IXUSR _S_IEXEC --#elif defined __MINGW32__ -+# ifndef _INTPTR_T_DEFINED -+# define _INTPTR_T_DEFINED -+# define intptr_t int -+# endif -+#elif defined(__MINGW32__) - # define setmode _setmode - # define stat _stat - # define chmod _chmod - # define getcwd _getcwd - # define putenv _putenv --#elif defined __CYGWIN__ -+#elif defined(__CYGWIN__) - # define HAVE_SETENV - # define FOPEN_WB "wb" --/* #elif defined other platforms ... */ -+/* #elif defined (other platforms) ... */ - #endif - --#if defined PATH_MAX -+#if defined(PATH_MAX) - # define LT_PATHMAX PATH_MAX --#elif defined MAXPATHLEN -+#elif defined(MAXPATHLEN) - # define LT_PATHMAX MAXPATHLEN - #else - # define LT_PATHMAX 1024 -@@ -5607,8 +4234,8 @@ - # define PATH_SEPARATOR ':' - #endif - --#if defined _WIN32 || defined __MSDOS__ || defined __DJGPP__ || \ -- defined __OS2__ -+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \ -+ defined (__OS2__) - # define HAVE_DOS_BASED_FILE_SYSTEM - # define FOPEN_WB "wb" - # ifndef DIR_SEPARATOR_2 -@@ -5641,10 +4268,10 @@ - - #define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) - #define XFREE(stale) do { \ -- if (stale) { free (stale); stale = 0; } \ -+ if (stale) { free ((void *) stale); stale = 0; } \ - } while (0) - --#if defined LT_DEBUGWRAPPER -+#if defined(LT_DEBUGWRAPPER) - static int lt_debug = 1; - #else - static int lt_debug = 0; -@@ -5673,16 +4300,11 @@ - EOF - - cat <<EOF --#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 5) --# define externally_visible volatile --#else --# define externally_visible __attribute__((externally_visible)) volatile --#endif --externally_visible const char * MAGIC_EXE = "$magic_exe"; -+volatile const char * MAGIC_EXE = "$magic_exe"; - const char * LIB_PATH_VARNAME = "$shlibpath_var"; - EOF - -- if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then -+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - func_to_host_path "$temp_rpath" - cat <<EOF - const char * LIB_PATH_VALUE = "$func_to_host_path_result"; -@@ -5706,7 +4328,7 @@ - EOF - fi - -- if test yes = "$fast_install"; then -+ if test "$fast_install" = yes; then - cat <<EOF - const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */ - EOF -@@ -5735,12 +4357,12 @@ - char *actual_cwrapper_name; - char *target_name; - char *lt_argv_zero; -- int rval = 127; -+ intptr_t rval = 127; - - int i; - - program_name = (char *) xstrdup (base_name (argv[0])); -- newargz = XMALLOC (char *, (size_t) argc + 1); -+ newargz = XMALLOC (char *, argc + 1); - - /* very simple arg parsing; don't want to rely on getopt - * also, copy 
all non cwrapper options to newargz, except -@@ -5749,10 +4371,10 @@ - newargc=0; - for (i = 1; i < argc; i++) - { -- if (STREQ (argv[i], dumpscript_opt)) -+ if (strcmp (argv[i], dumpscript_opt) == 0) - { - EOF -- case $host in -+ case "$host" in - *mingw* | *cygwin* ) - # make stdout use "unix" line endings - echo " setmode(1,_O_BINARY);" -@@ -5763,12 +4385,12 @@ - lt_dump_script (stdout); - return 0; - } -- if (STREQ (argv[i], debug_opt)) -+ if (strcmp (argv[i], debug_opt) == 0) - { - lt_debug = 1; - continue; - } -- if (STREQ (argv[i], ltwrapper_option_prefix)) -+ if (strcmp (argv[i], ltwrapper_option_prefix) == 0) - { - /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX - namespace, but it is not one of the ones we know about and -@@ -5791,7 +4413,7 @@ - EOF - cat <<EOF - /* The GNU banner must be the first non-error debug message */ -- lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE) $VERSION\n"); -+ lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n"); - EOF - cat <<"EOF" - lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]); -@@ -5902,7 +4524,7 @@ - cat <<"EOF" - /* execv doesn't actually work on mingw as expected on unix */ - newargz = prepare_spawn (newargz); -- rval = (int) _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz); -+ rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz); - if (rval == -1) - { - /* failed to start process */ -@@ -5947,7 +4569,7 @@ - { - const char *base; - --#if defined HAVE_DOS_BASED_FILE_SYSTEM -+#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - /* Skip over the disk name in MSDOS pathnames. */ - if (isalpha ((unsigned char) name[0]) && name[1] == ':') - name += 2; -@@ -6006,7 +4628,7 @@ - const char *p_next; - /* static buffer for getcwd */ - char tmp[LT_PATHMAX + 1]; -- size_t tmp_len; -+ int tmp_len; - char *concat_name; - - lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n", -@@ -6016,7 +4638,7 @@ - return NULL; - - /* Absolute path? */ --#if defined HAVE_DOS_BASED_FILE_SYSTEM -+#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':') - { - concat_name = xstrdup (wrapper); -@@ -6034,7 +4656,7 @@ - return concat_name; - XFREE (concat_name); - } --#if defined HAVE_DOS_BASED_FILE_SYSTEM -+#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - } - #endif - -@@ -6057,7 +4679,7 @@ - for (q = p; *q; q++) - if (IS_PATH_SEPARATOR (*q)) - break; -- p_len = (size_t) (q - p); -+ p_len = q - p; - p_next = (*q == '\0' ? 
q : q + 1); - if (p_len == 0) - { -@@ -6176,7 +4798,7 @@ - if (patlen <= len) - { - str += len - patlen; -- if (STREQ (str, pat)) -+ if (strcmp (str, pat) == 0) - *str = '\0'; - } - return str; -@@ -6241,7 +4863,7 @@ - char *str = xstrdup (value); - setenv (name, str, 1); - #else -- size_t len = strlen (name) + 1 + strlen (value) + 1; -+ int len = strlen (name) + 1 + strlen (value) + 1; - char *str = XMALLOC (char, len); - sprintf (str, "%s=%s", name, value); - if (putenv (str) != EXIT_SUCCESS) -@@ -6258,8 +4880,8 @@ - char *new_value; - if (orig_value && *orig_value) - { -- size_t orig_value_len = strlen (orig_value); -- size_t add_len = strlen (add); -+ int orig_value_len = strlen (orig_value); -+ int add_len = strlen (add); - new_value = XMALLOC (char, add_len + orig_value_len + 1); - if (to_end) - { -@@ -6290,10 +4912,10 @@ - { - char *new_value = lt_extend_str (getenv (name), value, 0); - /* some systems can't cope with a ':'-terminated path #' */ -- size_t len = strlen (new_value); -- while ((len > 0) && IS_PATH_SEPARATOR (new_value[len-1])) -+ int len = strlen (new_value); -+ while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1])) - { -- new_value[--len] = '\0'; -+ new_value[len-1] = '\0'; - } - lt_setenv (name, new_value); - XFREE (new_value); -@@ -6460,47 +5082,27 @@ - # True if ARG is an import lib, as indicated by $file_magic_cmd - func_win32_import_lib_p () - { -- $debug_cmd -- -+ $opt_debug - case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in - *import*) : ;; - *) false ;; - esac - } - --# func_suncc_cstd_abi --# !!ONLY CALL THIS FOR SUN CC AFTER $compile_command IS FULLY EXPANDED!! --# Several compiler flags select an ABI that is incompatible with the --# Cstd library. Avoid specifying it if any are in CXXFLAGS. --func_suncc_cstd_abi () --{ -- $debug_cmd -- -- case " $compile_command " in -- *" -compat=g "*|*\ -std=c++[0-9][0-9]\ *|*" -library=stdcxx4 "*|*" -library=stlport4 "*) -- suncc_use_cstd_abi=no -- ;; -- *) -- suncc_use_cstd_abi=yes -- ;; -- esac --} -- - # func_mode_link arg... - func_mode_link () - { -- $debug_cmd -- -+ $opt_debug - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) - # It is impossible to link a dll without this setting, and - # we shouldn't force the makefile maintainer to figure out -- # what system we are compiling for in order to pass an extra -+ # which system we are compiling for in order to pass an extra - # flag for every libtool invocation. - # allow_undefined=no - - # FIXME: Unfortunately, there are problems with the above when trying -- # to make a dll that has undefined symbols, in which case not -+ # to make a dll which has undefined symbols, in which case not - # even a static library is built. For now, we need to specify - # -no-undefined on the libtool link line when we can be certain - # that all symbols are satisfied, otherwise we get a static library. -@@ -6544,11 +5146,10 @@ - module=no - no_install=no - objs= -- os2dllname= - non_pic_objects= - precious_files_regex= - prefer_static_libs=no -- preload=false -+ preload=no - prev= - prevarg= - release= -@@ -6560,7 +5161,7 @@ - vinfo= - vinfo_number=no - weak_libs= -- single_module=$wl-single_module -+ single_module="${wl}-single_module" - func_infer_tag $base_compile - - # We need to know -static, to get the right output filenames. 
-@@ -6568,15 +5169,15 @@ - do - case $arg in - -shared) -- test yes != "$build_libtool_libs" \ -- && func_fatal_configuration "cannot build a shared library" -+ test "$build_libtool_libs" != yes && \ -+ func_fatal_configuration "can not build a shared library" - build_old_libs=no - break - ;; - -all-static | -static | -static-libtool-libs) - case $arg in - -all-static) -- if test yes = "$build_libtool_libs" && test -z "$link_static_flag"; then -+ if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then - func_warning "complete static linking is impossible in this configuration" - fi - if test -n "$link_static_flag"; then -@@ -6609,7 +5210,7 @@ - - # Go through the arguments, transforming them on the way. - while test "$#" -gt 0; do -- arg=$1 -+ arg="$1" - shift - func_quote_for_eval "$arg" - qarg=$func_quote_for_eval_unquoted_result -@@ -6626,21 +5227,21 @@ - - case $prev in - bindir) -- bindir=$arg -+ bindir="$arg" - prev= - continue - ;; - dlfiles|dlprefiles) -- $preload || { -+ if test "$preload" = no; then - # Add the symbol object into the linking commands. - func_append compile_command " @SYMFILE@" - func_append finalize_command " @SYMFILE@" -- preload=: -- } -+ preload=yes -+ fi - case $arg in - *.la | *.lo) ;; # We handle these cases below. - force) -- if test no = "$dlself"; then -+ if test "$dlself" = no; then - dlself=needless - export_dynamic=yes - fi -@@ -6648,9 +5249,9 @@ - continue - ;; - self) -- if test dlprefiles = "$prev"; then -+ if test "$prev" = dlprefiles; then - dlself=yes -- elif test dlfiles = "$prev" && test yes != "$dlopen_self"; then -+ elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then - dlself=yes - else - dlself=needless -@@ -6660,7 +5261,7 @@ - continue - ;; - *) -- if test dlfiles = "$prev"; then -+ if test "$prev" = dlfiles; then - func_append dlfiles " $arg" - else - func_append dlprefiles " $arg" -@@ -6671,14 +5272,14 @@ - esac - ;; - expsyms) -- export_symbols=$arg -+ export_symbols="$arg" - test -f "$arg" \ -- || func_fatal_error "symbol file '$arg' does not exist" -+ || func_fatal_error "symbol file \`$arg' does not exist" - prev= - continue - ;; - expsyms_regex) -- export_symbols_regex=$arg -+ export_symbols_regex="$arg" - prev= - continue - ;; -@@ -6696,13 +5297,7 @@ - continue - ;; - inst_prefix) -- inst_prefix_dir=$arg -- prev= -- continue -- ;; -- mllvm) -- # Clang does not use LLVM to link, so we can simply discard any -- # '-mllvm $arg' options when doing the link step. -+ inst_prefix_dir="$arg" - prev= - continue - ;; -@@ -6726,21 +5321,21 @@ - - if test -z "$pic_object" || - test -z "$non_pic_object" || -- test none = "$pic_object" && -- test none = "$non_pic_object"; then -- func_fatal_error "cannot find name of object for '$arg'" -+ test "$pic_object" = none && -+ test "$non_pic_object" = none; then -+ func_fatal_error "cannot find name of object for \`$arg'" - fi - - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - -- if test none != "$pic_object"; then -+ if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. 
-- pic_object=$xdir$pic_object -+ pic_object="$xdir$pic_object" - -- if test dlfiles = "$prev"; then -- if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then -+ if test "$prev" = dlfiles; then -+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - func_append dlfiles " $pic_object" - prev= - continue -@@ -6751,7 +5346,7 @@ - fi - - # CHECK ME: I think I busted this. -Ossama -- if test dlprefiles = "$prev"; then -+ if test "$prev" = dlprefiles; then - # Preload the old-style object. - func_append dlprefiles " $pic_object" - prev= -@@ -6759,23 +5354,23 @@ - - # A PIC object. - func_append libobjs " $pic_object" -- arg=$pic_object -+ arg="$pic_object" - fi - - # Non-PIC object. -- if test none != "$non_pic_object"; then -+ if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. -- non_pic_object=$xdir$non_pic_object -+ non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - func_append non_pic_objects " $non_pic_object" -- if test -z "$pic_object" || test none = "$pic_object"; then -- arg=$non_pic_object -+ if test -z "$pic_object" || test "$pic_object" = none ; then -+ arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. -- non_pic_object=$pic_object -+ non_pic_object="$pic_object" - func_append non_pic_objects " $non_pic_object" - fi - else -@@ -6783,7 +5378,7 @@ - if $opt_dry_run; then - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - - func_lo2o "$arg" - pic_object=$xdir$objdir/$func_lo2o_result -@@ -6791,29 +5386,24 @@ - func_append libobjs " $pic_object" - func_append non_pic_objects " $non_pic_object" - else -- func_fatal_error "'$arg' is not a valid libtool object" -+ func_fatal_error "\`$arg' is not a valid libtool object" - fi - fi - done - else -- func_fatal_error "link input file '$arg' does not exist" -+ func_fatal_error "link input file \`$arg' does not exist" - fi - arg=$save_arg - prev= - continue - ;; -- os2dllname) -- os2dllname=$arg -- prev= -- continue -- ;; - precious_regex) -- precious_files_regex=$arg -+ precious_files_regex="$arg" - prev= - continue - ;; - release) -- release=-$arg -+ release="-$arg" - prev= - continue - ;; -@@ -6825,7 +5415,7 @@ - func_fatal_error "only absolute run-paths are allowed" - ;; - esac -- if test rpath = "$prev"; then -+ if test "$prev" = rpath; then - case "$rpath " in - *" $arg "*) ;; - *) func_append rpath " $arg" ;; -@@ -6840,7 +5430,7 @@ - continue - ;; - shrext) -- shrext_cmds=$arg -+ shrext_cmds="$arg" - prev= - continue - ;; -@@ -6880,7 +5470,7 @@ - esac - fi # test -n "$prev" - -- prevarg=$arg -+ prevarg="$arg" - - case $arg in - -all-static) -@@ -6894,7 +5484,7 @@ - - -allow-undefined) - # FIXME: remove this flag sometime in the future. 
-- func_fatal_error "'-allow-undefined' must not be used because it is the default" -+ func_fatal_error "\`-allow-undefined' must not be used because it is the default" - ;; - - -avoid-version) -@@ -6926,7 +5516,7 @@ - if test -n "$export_symbols" || test -n "$export_symbols_regex"; then - func_fatal_error "more than one -exported-symbols argument is not allowed" - fi -- if test X-export-symbols = "X$arg"; then -+ if test "X$arg" = "X-export-symbols"; then - prev=expsyms - else - prev=expsyms_regex -@@ -6960,9 +5550,9 @@ - func_stripname "-L" '' "$arg" - if test -z "$func_stripname_result"; then - if test "$#" -gt 0; then -- func_fatal_error "require no space between '-L' and '$1'" -+ func_fatal_error "require no space between \`-L' and \`$1'" - else -- func_fatal_error "need path for '-L' option" -+ func_fatal_error "need path for \`-L' option" - fi - fi - func_resolve_sysroot "$func_stripname_result" -@@ -6973,8 +5563,8 @@ - *) - absdir=`cd "$dir" && pwd` - test -z "$absdir" && \ -- func_fatal_error "cannot determine absolute directory name of '$dir'" -- dir=$absdir -+ func_fatal_error "cannot determine absolute directory name of \`$dir'" -+ dir="$absdir" - ;; - esac - case "$deplibs " in -@@ -7009,7 +5599,7 @@ - ;; - - -l*) -- if test X-lc = "X$arg" || test X-lm = "X$arg"; then -+ if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*) - # These systems don't actually have a C or math library (as such) -@@ -7017,11 +5607,11 @@ - ;; - *-*-os2*) - # These systems don't actually have a C library (as such) -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; -- *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*) -+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc due to us having libc/libc_r. -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C and math libraries are in the System framework -@@ -7030,16 +5620,16 @@ - ;; - *-*-sco3.2v5* | *-*-sco5v6*) - # Causes problems with __ctype -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; - *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) - # Compiler inserts libc in the correct place for threads to work -- test X-lc = "X$arg" && continue -+ test "X$arg" = "X-lc" && continue - ;; - esac -- elif test X-lc_r = "X$arg"; then -+ elif test "X$arg" = "X-lc_r"; then - case $host in -- *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*) -+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc_r directly, use -pthread flag. - continue - ;; -@@ -7049,11 +5639,6 @@ - continue - ;; - -- -mllvm) -- prev=mllvm -- continue -- ;; -- - -module) - module=yes - continue -@@ -7083,7 +5668,7 @@ - ;; - - -multi_module) -- single_module=$wl-multi_module -+ single_module="${wl}-multi_module" - continue - ;; - -@@ -7097,8 +5682,8 @@ - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*) - # The PATH hackery in wrapper scripts is required on Windows - # and Darwin in order for the loader to find any dlls it needs. 
-- func_warning "'-no-install' is ignored for $host" -- func_warning "assuming '-no-fast-install' instead" -+ func_warning "\`-no-install' is ignored for $host" -+ func_warning "assuming \`-no-fast-install' instead" - fast_install=no - ;; - *) no_install=yes ;; -@@ -7116,11 +5701,6 @@ - continue - ;; - -- -os2dllname) -- prev=os2dllname -- continue -- ;; -- - -o) prev=output ;; - - -precious-files-regex) -@@ -7208,14 +5788,14 @@ - func_stripname '-Wc,' '' "$arg" - args=$func_stripname_result - arg= -- save_ifs=$IFS; IFS=, -+ save_ifs="$IFS"; IFS=',' - for flag in $args; do -- IFS=$save_ifs -+ IFS="$save_ifs" - func_quote_for_eval "$flag" - func_append arg " $func_quote_for_eval_result" - func_append compiler_flags " $func_quote_for_eval_result" - done -- IFS=$save_ifs -+ IFS="$save_ifs" - func_stripname ' ' '' "$arg" - arg=$func_stripname_result - ;; -@@ -7224,15 +5804,15 @@ - func_stripname '-Wl,' '' "$arg" - args=$func_stripname_result - arg= -- save_ifs=$IFS; IFS=, -+ save_ifs="$IFS"; IFS=',' - for flag in $args; do -- IFS=$save_ifs -+ IFS="$save_ifs" - func_quote_for_eval "$flag" - func_append arg " $wl$func_quote_for_eval_result" - func_append compiler_flags " $wl$func_quote_for_eval_result" - func_append linker_flags " $func_quote_for_eval_result" - done -- IFS=$save_ifs -+ IFS="$save_ifs" - func_stripname ' ' '' "$arg" - arg=$func_stripname_result - ;; -@@ -7255,7 +5835,7 @@ - # -msg_* for osf cc - -msg_*) - func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - ;; - - # Flags to be passed through unchanged, with rationale: -@@ -7267,46 +5847,25 @@ - # -m*, -t[45]*, -txscale* architecture-specific flags for GCC - # -F/path path to uninstalled frameworks, gcc on darwin - # -p, -pg, --coverage, -fprofile-* profiling flags for GCC -- # -fstack-protector* stack protector flags for GCC - # @file GCC response files - # -tp=* Portland pgcc target processor selection - # --sysroot=* for sysroot support -- # -O*, -g*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization -- # -stdlib=* select c++ std lib with clang -+ # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization - -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ - -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \ -- -O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-stdlib=*) -+ -O*|-flto*|-fwhopr*|-fuse-linker-plugin) - func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - func_append compile_command " $arg" - func_append finalize_command " $arg" - func_append compiler_flags " $arg" - continue - ;; - -- -Z*) -- if test os2 = "`expr $host : '.*\(os2\)'`"; then -- # OS/2 uses -Zxxx to specify OS/2-specific options -- compiler_flags="$compiler_flags $arg" -- func_append compile_command " $arg" -- func_append finalize_command " $arg" -- case $arg in -- -Zlinker | -Zstack) -- prev=xcompiler -- ;; -- esac -- continue -- else -- # Otherwise treat like 'Some other compiler flag' below -- func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -- fi -- ;; -- - # Some other compiler flag. 
- -* | +*) - func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - ;; - - *.$objext) -@@ -7327,21 +5886,21 @@ - - if test -z "$pic_object" || - test -z "$non_pic_object" || -- test none = "$pic_object" && -- test none = "$non_pic_object"; then -- func_fatal_error "cannot find name of object for '$arg'" -+ test "$pic_object" = none && -+ test "$non_pic_object" = none; then -+ func_fatal_error "cannot find name of object for \`$arg'" - fi - - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - -- test none = "$pic_object" || { -+ if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. -- pic_object=$xdir$pic_object -+ pic_object="$xdir$pic_object" - -- if test dlfiles = "$prev"; then -- if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then -+ if test "$prev" = dlfiles; then -+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - func_append dlfiles " $pic_object" - prev= - continue -@@ -7352,7 +5911,7 @@ - fi - - # CHECK ME: I think I busted this. -Ossama -- if test dlprefiles = "$prev"; then -+ if test "$prev" = dlprefiles; then - # Preload the old-style object. - func_append dlprefiles " $pic_object" - prev= -@@ -7360,23 +5919,23 @@ - - # A PIC object. - func_append libobjs " $pic_object" -- arg=$pic_object -- } -+ arg="$pic_object" -+ fi - - # Non-PIC object. -- if test none != "$non_pic_object"; then -+ if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. -- non_pic_object=$xdir$non_pic_object -+ non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - func_append non_pic_objects " $non_pic_object" -- if test -z "$pic_object" || test none = "$pic_object"; then -- arg=$non_pic_object -+ if test -z "$pic_object" || test "$pic_object" = none ; then -+ arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. -- non_pic_object=$pic_object -+ non_pic_object="$pic_object" - func_append non_pic_objects " $non_pic_object" - fi - else -@@ -7384,7 +5943,7 @@ - if $opt_dry_run; then - # Extract subdirectory from the argument. - func_dirname "$arg" "/" "" -- xdir=$func_dirname_result -+ xdir="$func_dirname_result" - - func_lo2o "$arg" - pic_object=$xdir$objdir/$func_lo2o_result -@@ -7392,7 +5951,7 @@ - func_append libobjs " $pic_object" - func_append non_pic_objects " $non_pic_object" - else -- func_fatal_error "'$arg' is not a valid libtool object" -+ func_fatal_error "\`$arg' is not a valid libtool object" - fi - fi - ;; -@@ -7408,11 +5967,11 @@ - # A libtool-controlled library. - - func_resolve_sysroot "$arg" -- if test dlfiles = "$prev"; then -+ if test "$prev" = dlfiles; then - # This library was specified with -dlopen. - func_append dlfiles " $func_resolve_sysroot_result" - prev= -- elif test dlprefiles = "$prev"; then -+ elif test "$prev" = dlprefiles; then - # The library was specified with -dlpreopen. - func_append dlprefiles " $func_resolve_sysroot_result" - prev= -@@ -7427,7 +5986,7 @@ - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. 
- func_quote_for_eval "$arg" -- arg=$func_quote_for_eval_result -+ arg="$func_quote_for_eval_result" - ;; - esac # arg - -@@ -7439,9 +5998,9 @@ - done # argument parsing loop - - test -n "$prev" && \ -- func_fatal_help "the '$prevarg' option requires an argument" -+ func_fatal_help "the \`$prevarg' option requires an argument" - -- if test yes = "$export_dynamic" && test -n "$export_dynamic_flag_spec"; then -+ if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then - eval arg=\"$export_dynamic_flag_spec\" - func_append compile_command " $arg" - func_append finalize_command " $arg" -@@ -7450,23 +6009,20 @@ - oldlibs= - # calculate the name of the file, without its directory - func_basename "$output" -- outputname=$func_basename_result -- libobjs_save=$libobjs -+ outputname="$func_basename_result" -+ libobjs_save="$libobjs" - - if test -n "$shlibpath_var"; then - # get the directories listed in $shlibpath_var -- eval shlib_search_path=\`\$ECHO \"\$$shlibpath_var\" \| \$SED \'s/:/ /g\'\` -+ eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\` - else - shlib_search_path= - fi - eval sys_lib_search_path=\"$sys_lib_search_path_spec\" - eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" - -- # Definition is injected by LT_CONFIG during libtool generation. -- func_munge_path_list sys_lib_dlsearch_path "$LT_SYS_LIBRARY_PATH" -- - func_dirname "$output" "/" "" -- output_objdir=$func_dirname_result$objdir -+ output_objdir="$func_dirname_result$objdir" - func_to_tool_file "$output_objdir/" - tool_output_objdir=$func_to_tool_file_result - # Create the object directory. -@@ -7489,7 +6045,7 @@ - # Find all interdependent deplibs by searching for libraries - # that are linked more than once (e.g. -la -lb -la) - for deplib in $deplibs; do -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$libs " in - *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac -@@ -7497,7 +6053,7 @@ - func_append libs " $deplib" - done - -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - libs="$predeps $libs $compiler_lib_search_path $postdeps" - - # Compute libraries that are listed more than once in $predeps -@@ -7529,7 +6085,7 @@ - case $file in - *.la) ;; - *) -- func_fatal_help "libraries can '-dlopen' only libtool libraries: $file" -+ func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file" - ;; - esac - done -@@ -7537,7 +6093,7 @@ - prog) - compile_deplibs= - finalize_deplibs= -- alldeplibs=false -+ alldeplibs=no - newdlfiles= - newdlprefiles= - passes="conv scan dlopen dlpreopen link" -@@ -7549,29 +6105,29 @@ - for pass in $passes; do - # The preopen pass in lib mode reverses $deplibs; put it back here - # so that -L comes before libs that need it for instance... 
-- if test lib,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "lib,link"; then - ## FIXME: Find the place where the list is rebuilt in the wrong - ## order, and fix it there properly - tmp_deplibs= - for deplib in $deplibs; do - tmp_deplibs="$deplib $tmp_deplibs" - done -- deplibs=$tmp_deplibs -+ deplibs="$tmp_deplibs" - fi - -- if test lib,link = "$linkmode,$pass" || -- test prog,scan = "$linkmode,$pass"; then -- libs=$deplibs -+ if test "$linkmode,$pass" = "lib,link" || -+ test "$linkmode,$pass" = "prog,scan"; then -+ libs="$deplibs" - deplibs= - fi -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - case $pass in -- dlopen) libs=$dlfiles ;; -- dlpreopen) libs=$dlprefiles ;; -+ dlopen) libs="$dlfiles" ;; -+ dlpreopen) libs="$dlprefiles" ;; - link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; - esac - fi -- if test lib,dlpreopen = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "lib,dlpreopen"; then - # Collect and forward deplibs of preopened libtool libs - for lib in $dlprefiles; do - # Ignore non-libtool-libs -@@ -7592,26 +6148,26 @@ - esac - done - done -- libs=$dlprefiles -+ libs="$dlprefiles" - fi -- if test dlopen = "$pass"; then -+ if test "$pass" = dlopen; then - # Collect dlpreopened libraries -- save_deplibs=$deplibs -+ save_deplibs="$deplibs" - deplibs= - fi - - for deplib in $libs; do - lib= -- found=false -+ found=no - case $deplib in - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \ - |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*) -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - func_append compiler_flags " $deplib" -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; - * ) func_append new_inherited_linker_flags " $deplib" ;; -@@ -7621,13 +6177,13 @@ - continue - ;; - -l*) -- if test lib != "$linkmode" && test prog != "$linkmode"; then -- func_warning "'-l' is ignored for archives/objects" -+ if test "$linkmode" != lib && test "$linkmode" != prog; then -+ func_warning "\`-l' is ignored for archives/objects" - continue - fi - func_stripname '-l' '' "$deplib" - name=$func_stripname_result -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" - else - searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" -@@ -7635,22 +6191,31 @@ - for searchdir in $searchdirs; do - for search_ext in .la $std_shrext .so .a; do - # Search the libtool library -- lib=$searchdir/lib$name$search_ext -+ lib="$searchdir/lib${name}${search_ext}" - if test -f "$lib"; then -- if test .la = "$search_ext"; then -- found=: -+ if test "$search_ext" = ".la"; then -+ found=yes - else -- found=false -+ found=no - fi - break 2 - fi - done - done -- if $found; then -- # deplib is a libtool library -+ if test "$found" != yes; then -+ # deplib doesn't seem to be a libtool library -+ if test "$linkmode,$pass" = "prog,link"; then -+ compile_deplibs="$deplib $compile_deplibs" -+ finalize_deplibs="$deplib $finalize_deplibs" -+ else -+ deplibs="$deplib $deplibs" -+ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" -+ fi -+ continue -+ else # deplib is a libtool library - # If $allow_libtool_libs_with_static_runtimes && $deplib is a 
stdlib, - # We need to do some special things here, and not later. -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $deplib "*) - if func_lalib_p "$lib"; then -@@ -7658,19 +6223,19 @@ - old_library= - func_source "$lib" - for l in $old_library $library_names; do -- ll=$l -+ ll="$l" - done -- if test "X$ll" = "X$old_library"; then # only static version available -- found=false -+ if test "X$ll" = "X$old_library" ; then # only static version available -+ found=no - func_dirname "$lib" "" "." -- ladir=$func_dirname_result -+ ladir="$func_dirname_result" - lib=$ladir/$old_library -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" -- test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs" -+ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - fi -@@ -7679,25 +6244,15 @@ - *) ;; - esac - fi -- else -- # deplib doesn't seem to be a libtool library -- if test prog,link = "$linkmode,$pass"; then -- compile_deplibs="$deplib $compile_deplibs" -- finalize_deplibs="$deplib $finalize_deplibs" -- else -- deplibs="$deplib $deplibs" -- test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs" -- fi -- continue - fi - ;; # -l - *.ltframework) -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; - * ) func_append new_inherited_linker_flags " $deplib" ;; -@@ -7710,18 +6265,18 @@ - case $linkmode in - lib) - deplibs="$deplib $deplibs" -- test conv = "$pass" && continue -+ test "$pass" = conv && continue - newdependency_libs="$deplib $newdependency_libs" - func_stripname '-L' '' "$deplib" - func_resolve_sysroot "$func_stripname_result" - func_append newlib_search_path " $func_resolve_sysroot_result" - ;; - prog) -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi -- if test scan = "$pass"; then -+ if test "$pass" = scan; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" -@@ -7732,13 +6287,13 @@ - func_append newlib_search_path " $func_resolve_sysroot_result" - ;; - *) -- func_warning "'-L' is ignored for archives/objects" -+ func_warning "\`-L' is ignored for archives/objects" - ;; - esac # linkmode - continue - ;; # -L - -R*) -- if test link = "$pass"; then -+ if test "$pass" = link; then - func_stripname '-R' '' "$deplib" - func_resolve_sysroot "$func_stripname_result" - dir=$func_resolve_sysroot_result -@@ -7756,7 +6311,7 @@ - lib=$func_resolve_sysroot_result - ;; - *.$libext) -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi -@@ -7767,26 +6322,21 @@ - case " $dlpreconveniencelibs " in - *" $deplib "*) ;; - *) -- valid_a_lib=false -+ valid_a_lib=no - case $deplibs_check_method in - match_pattern*) - set dummy $deplibs_check_method; shift - match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` - if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \ - | $EGREP "$match_pattern_regex" > 
/dev/null; then -- valid_a_lib=: -+ valid_a_lib=yes - fi - ;; - pass_all) -- valid_a_lib=: -+ valid_a_lib=yes - ;; - esac -- if $valid_a_lib; then -- echo -- $ECHO "*** Warning: Linking the shared library $output against the" -- $ECHO "*** static library $deplib is not portable!" -- deplibs="$deplib $deplibs" -- else -+ if test "$valid_a_lib" != yes; then - echo - $ECHO "*** Warning: Trying to link with static lib archive $deplib." - echo "*** I have the capability to make that library automatically link in when" -@@ -7794,13 +6344,18 @@ - echo "*** shared version of the library, which you do not appear to have" - echo "*** because the file extensions .$libext of this argument makes me believe" - echo "*** that it is just a static archive that I should not use here." -+ else -+ echo -+ $ECHO "*** Warning: Linking the shared library $output against the" -+ $ECHO "*** static library $deplib is not portable!" -+ deplibs="$deplib $deplibs" - fi - ;; - esac - continue - ;; - prog) -- if test link != "$pass"; then -+ if test "$pass" != link; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" -@@ -7811,10 +6366,10 @@ - esac # linkmode - ;; # *.$libext - *.lo | *.$objext) -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - deplibs="$deplib $deplibs" -- elif test prog = "$linkmode"; then -- if test dlpreopen = "$pass" || test yes != "$dlopen_support" || test no = "$build_libtool_libs"; then -+ elif test "$linkmode" = prog; then -+ if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then - # If there is no dlopen support or we're linking statically, - # we need to preload. - func_append newdlprefiles " $deplib" -@@ -7827,20 +6382,22 @@ - continue - ;; - %DEPLIBS%) -- alldeplibs=: -+ alldeplibs=yes - continue - ;; - esac # case $deplib - -- $found || test -f "$lib" \ -- || func_fatal_error "cannot find the library '$lib' or unhandled argument '$deplib'" -+ if test "$found" = yes || test -f "$lib"; then : -+ else -+ func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'" -+ fi - - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$lib" \ -- || func_fatal_error "'$lib' is not a valid libtool archive" -+ || func_fatal_error "\`$lib' is not a valid libtool archive" - - func_dirname "$lib" "" "." -- ladir=$func_dirname_result -+ ladir="$func_dirname_result" - - dlname= - dlopen= -@@ -7870,30 +6427,30 @@ - done - fi - dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` -- if test lib,link = "$linkmode,$pass" || -- test prog,scan = "$linkmode,$pass" || -- { test prog != "$linkmode" && test lib != "$linkmode"; }; then -+ if test "$linkmode,$pass" = "lib,link" || -+ test "$linkmode,$pass" = "prog,scan" || -+ { test "$linkmode" != prog && test "$linkmode" != lib; }; then - test -n "$dlopen" && func_append dlfiles " $dlopen" - test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen" - fi - -- if test conv = "$pass"; then -+ if test "$pass" = conv; then - # Only check for convenience libraries - deplibs="$lib $deplibs" - if test -z "$libdir"; then - if test -z "$old_library"; then -- func_fatal_error "cannot find name of link library for '$lib'" -+ func_fatal_error "cannot find name of link library for \`$lib'" - fi - # It is a libtool convenience library, so add in its objects. 
- func_append convenience " $ladir/$objdir/$old_library" - func_append old_convenience " $ladir/$objdir/$old_library" -- elif test prog != "$linkmode" && test lib != "$linkmode"; then -- func_fatal_error "'$lib' is not a convenience library" -+ elif test "$linkmode" != prog && test "$linkmode" != lib; then -+ func_fatal_error "\`$lib' is not a convenience library" - fi - tmp_libs= - for deplib in $dependency_libs; do - deplibs="$deplib $deplibs" -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in - *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac -@@ -7907,26 +6464,26 @@ - # Get the name of the library we link against. - linklib= - if test -n "$old_library" && -- { test yes = "$prefer_static_libs" || -- test built,no = "$prefer_static_libs,$installed"; }; then -+ { test "$prefer_static_libs" = yes || -+ test "$prefer_static_libs,$installed" = "built,no"; }; then - linklib=$old_library - else - for l in $old_library $library_names; do -- linklib=$l -+ linklib="$l" - done - fi - if test -z "$linklib"; then -- func_fatal_error "cannot find name of link library for '$lib'" -+ func_fatal_error "cannot find name of link library for \`$lib'" - fi - - # This library was specified with -dlopen. -- if test dlopen = "$pass"; then -- test -z "$libdir" \ -- && func_fatal_error "cannot -dlopen a convenience library: '$lib'" -+ if test "$pass" = dlopen; then -+ if test -z "$libdir"; then -+ func_fatal_error "cannot -dlopen a convenience library: \`$lib'" -+ fi - if test -z "$dlname" || -- test yes != "$dlopen_support" || -- test no = "$build_libtool_libs" -- then -+ test "$dlopen_support" != yes || -+ test "$build_libtool_libs" = no; then - # If there is no dlname, no dlopen support or we're linking - # statically, we need to preload. We also need to preload any - # dependent libraries so libltdl's deplib preloader doesn't -@@ -7940,40 +6497,40 @@ - - # We need an absolute path. - case $ladir in -- [\\/]* | [A-Za-z]:[\\/]*) abs_ladir=$ladir ;; -+ [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; - *) - abs_ladir=`cd "$ladir" && pwd` - if test -z "$abs_ladir"; then -- func_warning "cannot determine absolute directory name of '$ladir'" -+ func_warning "cannot determine absolute directory name of \`$ladir'" - func_warning "passing it literally to the linker, although it might fail" -- abs_ladir=$ladir -+ abs_ladir="$ladir" - fi - ;; - esac - func_basename "$lib" -- laname=$func_basename_result -+ laname="$func_basename_result" - - # Find the relevant object directory and library name. -- if test yes = "$installed"; then -+ if test "X$installed" = Xyes; then - if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then -- func_warning "library '$lib' was moved." -- dir=$ladir -- absdir=$abs_ladir -- libdir=$abs_ladir -+ func_warning "library \`$lib' was moved." -+ dir="$ladir" -+ absdir="$abs_ladir" -+ libdir="$abs_ladir" - else -- dir=$lt_sysroot$libdir -- absdir=$lt_sysroot$libdir -+ dir="$lt_sysroot$libdir" -+ absdir="$lt_sysroot$libdir" - fi -- test yes = "$hardcode_automatic" && avoidtemprpath=yes -+ test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes - else - if test ! 
-f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then -- dir=$ladir -- absdir=$abs_ladir -+ dir="$ladir" -+ absdir="$abs_ladir" - # Remove this search path later - func_append notinst_path " $abs_ladir" - else -- dir=$ladir/$objdir -- absdir=$abs_ladir/$objdir -+ dir="$ladir/$objdir" -+ absdir="$abs_ladir/$objdir" - # Remove this search path later - func_append notinst_path " $abs_ladir" - fi -@@ -7982,11 +6539,11 @@ - name=$func_stripname_result - - # This library was specified with -dlpreopen. -- if test dlpreopen = "$pass"; then -- if test -z "$libdir" && test prog = "$linkmode"; then -- func_fatal_error "only libraries may -dlpreopen a convenience library: '$lib'" -+ if test "$pass" = dlpreopen; then -+ if test -z "$libdir" && test "$linkmode" = prog; then -+ func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" - fi -- case $host in -+ case "$host" in - # special handling for platforms with PE-DLLs. - *cygwin* | *mingw* | *cegcc* ) - # Linker will automatically link against shared library if both -@@ -8030,9 +6587,9 @@ - - if test -z "$libdir"; then - # Link the convenience library -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - deplibs="$dir/$old_library $deplibs" -- elif test prog,link = "$linkmode,$pass"; then -+ elif test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$dir/$old_library $compile_deplibs" - finalize_deplibs="$dir/$old_library $finalize_deplibs" - else -@@ -8042,14 +6599,14 @@ - fi - - -- if test prog = "$linkmode" && test link != "$pass"; then -+ if test "$linkmode" = prog && test "$pass" != link; then - func_append newlib_search_path " $ladir" - deplibs="$lib $deplibs" - -- linkalldeplibs=false -- if test no != "$link_all_deplibs" || test -z "$library_names" || -- test no = "$build_libtool_libs"; then -- linkalldeplibs=: -+ linkalldeplibs=no -+ if test "$link_all_deplibs" != no || test -z "$library_names" || -+ test "$build_libtool_libs" = no; then -+ linkalldeplibs=yes - fi - - tmp_libs= -@@ -8061,14 +6618,14 @@ - ;; - esac - # Need to link against all dependency_libs? -- if $linkalldeplibs; then -+ if test "$linkalldeplibs" = yes; then - deplibs="$deplib $deplibs" - else - # Need to hardcode shared library paths - # or/and link against static libraries - newdependency_libs="$deplib $newdependency_libs" - fi -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in - *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac -@@ -8078,15 +6635,15 @@ - continue - fi # $linkmode = prog... - -- if test prog,link = "$linkmode,$pass"; then -+ if test "$linkmode,$pass" = "prog,link"; then - if test -n "$library_names" && -- { { test no = "$prefer_static_libs" || -- test built,yes = "$prefer_static_libs,$installed"; } || -+ { { test "$prefer_static_libs" = no || -+ test "$prefer_static_libs,$installed" = "built,yes"; } || - test -z "$old_library"; }; then - # We need to hardcode the library path -- if test -n "$shlibpath_var" && test -z "$avoidtemprpath"; then -+ if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then - # Make sure the rpath contains only unique directories. -- case $temp_rpath: in -+ case "$temp_rpath:" in - *"$absdir:"*) ;; - *) func_append temp_rpath "$absdir:" ;; - esac -@@ -8115,9 +6672,9 @@ - esac - fi # $linkmode,$pass = prog,link... 
- -- if $alldeplibs && -- { test pass_all = "$deplibs_check_method" || -- { test yes = "$build_libtool_libs" && -+ if test "$alldeplibs" = yes && -+ { test "$deplibs_check_method" = pass_all || -+ { test "$build_libtool_libs" = yes && - test -n "$library_names"; }; }; then - # We only need to search for static libraries - continue -@@ -8126,19 +6683,19 @@ - - link_static=no # Whether the deplib will be linked statically - use_static_libs=$prefer_static_libs -- if test built = "$use_static_libs" && test yes = "$installed"; then -+ if test "$use_static_libs" = built && test "$installed" = yes; then - use_static_libs=no - fi - if test -n "$library_names" && -- { test no = "$use_static_libs" || test -z "$old_library"; }; then -+ { test "$use_static_libs" = no || test -z "$old_library"; }; then - case $host in -- *cygwin* | *mingw* | *cegcc* | *os2*) -+ *cygwin* | *mingw* | *cegcc*) - # No point in relinking DLLs because paths are not encoded - func_append notinst_deplibs " $lib" - need_relink=no - ;; - *) -- if test no = "$installed"; then -+ if test "$installed" = no; then - func_append notinst_deplibs " $lib" - need_relink=yes - fi -@@ -8148,24 +6705,24 @@ - - # Warn about portability, can't link against -module's on some - # systems (darwin). Don't bleat about dlopened modules though! -- dlopenmodule= -+ dlopenmodule="" - for dlpremoduletest in $dlprefiles; do - if test "X$dlpremoduletest" = "X$lib"; then -- dlopenmodule=$dlpremoduletest -+ dlopenmodule="$dlpremoduletest" - break - fi - done -- if test -z "$dlopenmodule" && test yes = "$shouldnotlink" && test link = "$pass"; then -+ if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then - echo -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - $ECHO "*** Warning: Linking the executable $output against the loadable module" - else - $ECHO "*** Warning: Linking the shared library $output against the loadable module" - fi - $ECHO "*** $linklib is not portable!" - fi -- if test lib = "$linkmode" && -- test yes = "$hardcode_into_libs"; then -+ if test "$linkmode" = lib && -+ test "$hardcode_into_libs" = yes; then - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. -@@ -8193,43 +6750,43 @@ - # figure out the soname - set dummy $library_names - shift -- realname=$1 -+ realname="$1" - shift - libname=`eval "\\$ECHO \"$libname_spec\""` - # use dlname if we got it. it's perfectly good, no? - if test -n "$dlname"; then -- soname=$dlname -+ soname="$dlname" - elif test -n "$soname_spec"; then - # bleh windows - case $host in -- *cygwin* | mingw* | *cegcc* | *os2*) -+ *cygwin* | mingw* | *cegcc*) - func_arith $current - $age - major=$func_arith_result -- versuffix=-$major -+ versuffix="-$major" - ;; - esac - eval soname=\"$soname_spec\" - else -- soname=$realname -+ soname="$realname" - fi - - # Make a new name for the extract_expsyms_cmds to use -- soroot=$soname -+ soroot="$soname" - func_basename "$soroot" -- soname=$func_basename_result -+ soname="$func_basename_result" - func_stripname 'lib' '.dll' "$soname" - newlib=libimp-$func_stripname_result.a - - # If the library has no export list, then create one now - if test -f "$output_objdir/$soname-def"; then : - else -- func_verbose "extracting exported symbol list from '$soname'" -+ func_verbose "extracting exported symbol list from \`$soname'" - func_execute_cmds "$extract_expsyms_cmds" 'exit $?' 
- fi - - # Create $newlib - if test -f "$output_objdir/$newlib"; then :; else -- func_verbose "generating import library for '$soname'" -+ func_verbose "generating import library for \`$soname'" - func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?' - fi - # make sure the library variables are pointing to the new library -@@ -8237,58 +6794,58 @@ - linklib=$newlib - fi # test -n "$old_archive_from_expsyms_cmds" - -- if test prog = "$linkmode" || test relink != "$opt_mode"; then -+ if test "$linkmode" = prog || test "$opt_mode" != relink; then - add_shlibpath= - add_dir= - add= - lib_linked=yes - case $hardcode_action in - immediate | unsupported) -- if test no = "$hardcode_direct"; then -- add=$dir/$linklib -+ if test "$hardcode_direct" = no; then -+ add="$dir/$linklib" - case $host in -- *-*-sco3.2v5.0.[024]*) add_dir=-L$dir ;; -- *-*-sysv4*uw2*) add_dir=-L$dir ;; -+ *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; -+ *-*-sysv4*uw2*) add_dir="-L$dir" ;; - *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ -- *-*-unixware7*) add_dir=-L$dir ;; -+ *-*-unixware7*) add_dir="-L$dir" ;; - *-*-darwin* ) -- # if the lib is a (non-dlopened) module then we cannot -+ # if the lib is a (non-dlopened) module then we can not - # link against it, someone is ignoring the earlier warnings - if /usr/bin/file -L $add 2> /dev/null | -- $GREP ": [^:]* bundle" >/dev/null; then -+ $GREP ": [^:]* bundle" >/dev/null ; then - if test "X$dlopenmodule" != "X$lib"; then - $ECHO "*** Warning: lib $linklib is a module, not a shared library" -- if test -z "$old_library"; then -+ if test -z "$old_library" ; then - echo - echo "*** And there doesn't seem to be a static archive available" - echo "*** The link will probably fail, sorry" - else -- add=$dir/$old_library -+ add="$dir/$old_library" - fi - elif test -n "$old_library"; then -- add=$dir/$old_library -+ add="$dir/$old_library" - fi - fi - esac -- elif test no = "$hardcode_minus_L"; then -+ elif test "$hardcode_minus_L" = no; then - case $host in -- *-*-sunos*) add_shlibpath=$dir ;; -+ *-*-sunos*) add_shlibpath="$dir" ;; - esac -- add_dir=-L$dir -- add=-l$name -- elif test no = "$hardcode_shlibpath_var"; then -- add_shlibpath=$dir -- add=-l$name -+ add_dir="-L$dir" -+ add="-l$name" -+ elif test "$hardcode_shlibpath_var" = no; then -+ add_shlibpath="$dir" -+ add="-l$name" - else - lib_linked=no - fi - ;; - relink) -- if test yes = "$hardcode_direct" && -- test no = "$hardcode_direct_absolute"; then -- add=$dir/$linklib -- elif test yes = "$hardcode_minus_L"; then -- add_dir=-L$absdir -+ if test "$hardcode_direct" = yes && -+ test "$hardcode_direct_absolute" = no; then -+ add="$dir/$linklib" -+ elif test "$hardcode_minus_L" = yes; then -+ add_dir="-L$absdir" - # Try looking first in the location we're being installed to. 
- if test -n "$inst_prefix_dir"; then - case $libdir in -@@ -8297,10 +6854,10 @@ - ;; - esac - fi -- add=-l$name -- elif test yes = "$hardcode_shlibpath_var"; then -- add_shlibpath=$dir -- add=-l$name -+ add="-l$name" -+ elif test "$hardcode_shlibpath_var" = yes; then -+ add_shlibpath="$dir" -+ add="-l$name" - else - lib_linked=no - fi -@@ -8308,7 +6865,7 @@ - *) lib_linked=no ;; - esac - -- if test yes != "$lib_linked"; then -+ if test "$lib_linked" != yes; then - func_fatal_configuration "unsupported hardcode properties" - fi - -@@ -8318,15 +6875,15 @@ - *) func_append compile_shlibpath "$add_shlibpath:" ;; - esac - fi -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" - test -n "$add" && compile_deplibs="$add $compile_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" -- if test yes != "$hardcode_direct" && -- test yes != "$hardcode_minus_L" && -- test yes = "$hardcode_shlibpath_var"; then -+ if test "$hardcode_direct" != yes && -+ test "$hardcode_minus_L" != yes && -+ test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) func_append finalize_shlibpath "$libdir:" ;; -@@ -8335,33 +6892,33 @@ - fi - fi - -- if test prog = "$linkmode" || test relink = "$opt_mode"; then -+ if test "$linkmode" = prog || test "$opt_mode" = relink; then - add_shlibpath= - add_dir= - add= - # Finalize command for both is simple: just hardcode it. -- if test yes = "$hardcode_direct" && -- test no = "$hardcode_direct_absolute"; then -- add=$libdir/$linklib -- elif test yes = "$hardcode_minus_L"; then -- add_dir=-L$libdir -- add=-l$name -- elif test yes = "$hardcode_shlibpath_var"; then -+ if test "$hardcode_direct" = yes && -+ test "$hardcode_direct_absolute" = no; then -+ add="$libdir/$linklib" -+ elif test "$hardcode_minus_L" = yes; then -+ add_dir="-L$libdir" -+ add="-l$name" -+ elif test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) func_append finalize_shlibpath "$libdir:" ;; - esac -- add=-l$name -- elif test yes = "$hardcode_automatic"; then -+ add="-l$name" -+ elif test "$hardcode_automatic" = yes; then - if test -n "$inst_prefix_dir" && -- test -f "$inst_prefix_dir$libdir/$linklib"; then -- add=$inst_prefix_dir$libdir/$linklib -+ test -f "$inst_prefix_dir$libdir/$linklib" ; then -+ add="$inst_prefix_dir$libdir/$linklib" - else -- add=$libdir/$linklib -+ add="$libdir/$linklib" - fi - else - # We cannot seem to hardcode it, guess we'll fake it. -- add_dir=-L$libdir -+ add_dir="-L$libdir" - # Try looking first in the location we're being installed to. - if test -n "$inst_prefix_dir"; then - case $libdir in -@@ -8370,10 +6927,10 @@ - ;; - esac - fi -- add=-l$name -+ add="-l$name" - fi - -- if test prog = "$linkmode"; then -+ if test "$linkmode" = prog; then - test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" - test -n "$add" && finalize_deplibs="$add $finalize_deplibs" - else -@@ -8381,43 +6938,43 @@ - test -n "$add" && deplibs="$add $deplibs" - fi - fi -- elif test prog = "$linkmode"; then -+ elif test "$linkmode" = prog; then - # Here we assume that one of hardcode_direct or hardcode_minus_L - # is not unsupported. This is valid on all known static and - # shared platforms. 
-- if test unsupported != "$hardcode_direct"; then -- test -n "$old_library" && linklib=$old_library -+ if test "$hardcode_direct" != unsupported; then -+ test -n "$old_library" && linklib="$old_library" - compile_deplibs="$dir/$linklib $compile_deplibs" - finalize_deplibs="$dir/$linklib $finalize_deplibs" - else - compile_deplibs="-l$name -L$dir $compile_deplibs" - finalize_deplibs="-l$name -L$dir $finalize_deplibs" - fi -- elif test yes = "$build_libtool_libs"; then -+ elif test "$build_libtool_libs" = yes; then - # Not a shared library -- if test pass_all != "$deplibs_check_method"; then -+ if test "$deplibs_check_method" != pass_all; then - # We're trying link a shared library against a static one - # but the system doesn't support it. - - # Just print a warning and add the library to dependency_libs so - # that the program can be linked against the static library. - echo -- $ECHO "*** Warning: This system cannot link to static lib archive $lib." -+ $ECHO "*** Warning: This system can not link to static lib archive $lib." - echo "*** I have the capability to make that library automatically link in when" - echo "*** you link to this library. But I can only do this if you have a" - echo "*** shared version of the library, which you do not appear to have." -- if test yes = "$module"; then -+ if test "$module" = yes; then - echo "*** But as you try to build a module library, libtool will still create " - echo "*** a static module, that should work as long as the dlopening application" - echo "*** is linked with the -dlopen flag to resolve symbols at runtime." - if test -z "$global_symbol_pipe"; then - echo - echo "*** However, this would only work if libtool was able to extract symbol" -- echo "*** lists from a program, using 'nm' or equivalent, but libtool could" -+ echo "*** lists from a program, using \`nm' or equivalent, but libtool could" - echo "*** not find such a program. So, this module is probably useless." -- echo "*** 'nm' from GNU binutils and a full rebuild may help." -+ echo "*** \`nm' from GNU binutils and a full rebuild may help." - fi -- if test no = "$build_old_libs"; then -+ if test "$build_old_libs" = no; then - build_libtool_libs=module - build_old_libs=yes - else -@@ -8430,11 +6987,11 @@ - fi - fi # link shared/static library? - -- if test lib = "$linkmode"; then -+ if test "$linkmode" = lib; then - if test -n "$dependency_libs" && -- { test yes != "$hardcode_into_libs" || -- test yes = "$build_old_libs" || -- test yes = "$link_static"; }; then -+ { test "$hardcode_into_libs" != yes || -+ test "$build_old_libs" = yes || -+ test "$link_static" = yes; }; then - # Extract -R from dependency_libs - temp_deplibs= - for libdir in $dependency_libs; do -@@ -8448,12 +7005,12 @@ - *) func_append temp_deplibs " $libdir";; - esac - done -- dependency_libs=$temp_deplibs -+ dependency_libs="$temp_deplibs" - fi - - func_append newlib_search_path " $absdir" - # Link against this library -- test no = "$link_static" && newdependency_libs="$abs_ladir/$laname $newdependency_libs" -+ test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" - # ... 
and its dependency_libs - tmp_libs= - for deplib in $dependency_libs; do -@@ -8463,7 +7020,7 @@ - func_resolve_sysroot "$func_stripname_result";; - *) func_resolve_sysroot "$deplib" ;; - esac -- if $opt_preserve_dup_deps; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in - *" $func_resolve_sysroot_result "*) - func_append specialdeplibs " $func_resolve_sysroot_result" ;; -@@ -8472,12 +7029,12 @@ - func_append tmp_libs " $func_resolve_sysroot_result" - done - -- if test no != "$link_all_deplibs"; then -+ if test "$link_all_deplibs" != no; then - # Add the search paths of all dependency libraries - for deplib in $dependency_libs; do - path= - case $deplib in -- -L*) path=$deplib ;; -+ -L*) path="$deplib" ;; - *.la) - func_resolve_sysroot "$deplib" - deplib=$func_resolve_sysroot_result -@@ -8485,12 +7042,12 @@ - dir=$func_dirname_result - # We need an absolute path. - case $dir in -- [\\/]* | [A-Za-z]:[\\/]*) absdir=$dir ;; -+ [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; - *) - absdir=`cd "$dir" && pwd` - if test -z "$absdir"; then -- func_warning "cannot determine absolute directory name of '$dir'" -- absdir=$dir -+ func_warning "cannot determine absolute directory name of \`$dir'" -+ absdir="$dir" - fi - ;; - esac -@@ -8498,35 +7055,35 @@ - case $host in - *-*-darwin*) - depdepl= -- eval deplibrary_names=`$SED -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` -- if test -n "$deplibrary_names"; then -- for tmp in $deplibrary_names; do -+ eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` -+ if test -n "$deplibrary_names" ; then -+ for tmp in $deplibrary_names ; do - depdepl=$tmp - done -- if test -f "$absdir/$objdir/$depdepl"; then -- depdepl=$absdir/$objdir/$depdepl -- darwin_install_name=`$OTOOL -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` -+ if test -f "$absdir/$objdir/$depdepl" ; then -+ depdepl="$absdir/$objdir/$depdepl" -+ darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` - if test -z "$darwin_install_name"; then -- darwin_install_name=`$OTOOL64 -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` -+ darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` - fi -- func_append compiler_flags " $wl-dylib_file $wl$darwin_install_name:$depdepl" -- func_append linker_flags " -dylib_file $darwin_install_name:$depdepl" -+ func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" -+ func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}" - path= - fi - fi - ;; - *) -- path=-L$absdir/$objdir -+ path="-L$absdir/$objdir" - ;; - esac - else -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - test -z "$libdir" && \ -- func_fatal_error "'$deplib' is not a valid libtool archive" -+ func_fatal_error "\`$deplib' is not a valid libtool archive" - test "$absdir" != "$libdir" && \ -- func_warning "'$deplib' seems to be moved" -+ func_warning "\`$deplib' seems to be moved" - -- path=-L$absdir -+ path="-L$absdir" - fi - ;; - esac -@@ -8538,23 +7095,23 @@ - fi # link_all_deplibs != no - fi # linkmode = lib - done # for deplib in $libs -- if test link = "$pass"; then -- if test prog = "$linkmode"; then -+ if test "$pass" = link; then -+ if test "$linkmode" = "prog"; then - compile_deplibs="$new_inherited_linker_flags $compile_deplibs" - finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs" - else - compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% 
\([^ $]*\).ltframework% -framework \1%g'` - fi - fi -- dependency_libs=$newdependency_libs -- if test dlpreopen = "$pass"; then -+ dependency_libs="$newdependency_libs" -+ if test "$pass" = dlpreopen; then - # Link the dlpreopened libraries before other libraries - for deplib in $save_deplibs; do - deplibs="$deplib $deplibs" - done - fi -- if test dlopen != "$pass"; then -- test conv = "$pass" || { -+ if test "$pass" != dlopen; then -+ if test "$pass" != conv; then - # Make sure lib_search_path contains only unique directories. - lib_search_path= - for dir in $newlib_search_path; do -@@ -8564,12 +7121,12 @@ - esac - done - newlib_search_path= -- } -+ fi - -- if test prog,link = "$linkmode,$pass"; then -- vars="compile_deplibs finalize_deplibs" -+ if test "$linkmode,$pass" != "prog,link"; then -+ vars="deplibs" - else -- vars=deplibs -+ vars="compile_deplibs finalize_deplibs" - fi - for var in $vars dependency_libs; do - # Add libraries to $var in reverse order -@@ -8627,93 +7184,62 @@ - eval $var=\"$tmp_libs\" - done # for var - fi -- -- # Add Sun CC postdeps if required: -- test CXX = "$tagname" && { -- case $host_os in -- linux*) -- case `$CC -V 2>&1 | sed 5q` in -- *Sun\ C*) # Sun C++ 5.9 -- func_suncc_cstd_abi -- -- if test no != "$suncc_use_cstd_abi"; then -- func_append postdeps ' -library=Cstd -library=Crun' -- fi -- ;; -- esac -- ;; -- -- solaris*) -- func_cc_basename "$CC" -- case $func_cc_basename_result in -- CC* | sunCC*) -- func_suncc_cstd_abi -- -- if test no != "$suncc_use_cstd_abi"; then -- func_append postdeps ' -library=Cstd -library=Crun' -- fi -- ;; -- esac -- ;; -- esac -- } -- - # Last step: remove runtime libs from dependency_libs - # (they stay in deplibs) - tmp_libs= -- for i in $dependency_libs; do -+ for i in $dependency_libs ; do - case " $predeps $postdeps $compiler_lib_search_path " in - *" $i "*) -- i= -+ i="" - ;; - esac -- if test -n "$i"; then -+ if test -n "$i" ; then - func_append tmp_libs " $i" - fi - done - dependency_libs=$tmp_libs - done # for pass -- if test prog = "$linkmode"; then -- dlfiles=$newdlfiles -+ if test "$linkmode" = prog; then -+ dlfiles="$newdlfiles" - fi -- if test prog = "$linkmode" || test lib = "$linkmode"; then -- dlprefiles=$newdlprefiles -+ if test "$linkmode" = prog || test "$linkmode" = lib; then -+ dlprefiles="$newdlprefiles" - fi - - case $linkmode in - oldlib) -- if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then -- func_warning "'-dlopen' is ignored for archives" -+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then -+ func_warning "\`-dlopen' is ignored for archives" - fi - - case " $deplibs" in - *\ -l* | *\ -L*) -- func_warning "'-l' and '-L' are ignored for archives" ;; -+ func_warning "\`-l' and \`-L' are ignored for archives" ;; - esac - - test -n "$rpath" && \ -- func_warning "'-rpath' is ignored for archives" -+ func_warning "\`-rpath' is ignored for archives" - - test -n "$xrpath" && \ -- func_warning "'-R' is ignored for archives" -+ func_warning "\`-R' is ignored for archives" - - test -n "$vinfo" && \ -- func_warning "'-version-info/-version-number' is ignored for archives" -+ func_warning "\`-version-info/-version-number' is ignored for archives" - - test -n "$release" && \ -- func_warning "'-release' is ignored for archives" -+ func_warning "\`-release' is ignored for archives" - - test -n "$export_symbols$export_symbols_regex" && \ -- func_warning "'-export-symbols' is ignored for archives" -+ func_warning "\`-export-symbols' is ignored for archives" - - # Now set the variables for 
building old libraries. - build_libtool_libs=no -- oldlibs=$output -+ oldlibs="$output" - func_append objs "$old_deplibs" - ;; - - lib) -- # Make sure we only generate libraries of the form 'libNAME.la'. -+ # Make sure we only generate libraries of the form `libNAME.la'. - case $outputname in - lib*) - func_stripname 'lib' '.la' "$outputname" -@@ -8722,10 +7248,10 @@ - eval libname=\"$libname_spec\" - ;; - *) -- test no = "$module" \ -- && func_fatal_help "libtool library '$output' must begin with 'lib'" -+ test "$module" = no && \ -+ func_fatal_help "libtool library \`$output' must begin with \`lib'" - -- if test no != "$need_lib_prefix"; then -+ if test "$need_lib_prefix" != no; then - # Add the "lib" prefix for modules if required - func_stripname '' '.la' "$outputname" - name=$func_stripname_result -@@ -8739,8 +7265,8 @@ - esac - - if test -n "$objs"; then -- if test pass_all != "$deplibs_check_method"; then -- func_fatal_error "cannot build libtool library '$output' from non-libtool objects on this host:$objs" -+ if test "$deplibs_check_method" != pass_all; then -+ func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs" - else - echo - $ECHO "*** Warning: Linking the shared library $output against the non-libtool" -@@ -8749,21 +7275,21 @@ - fi - fi - -- test no = "$dlself" \ -- || func_warning "'-dlopen self' is ignored for libtool libraries" -+ test "$dlself" != no && \ -+ func_warning "\`-dlopen self' is ignored for libtool libraries" - - set dummy $rpath - shift -- test 1 -lt "$#" \ -- && func_warning "ignoring multiple '-rpath's for a libtool library" -+ test "$#" -gt 1 && \ -+ func_warning "ignoring multiple \`-rpath's for a libtool library" - -- install_libdir=$1 -+ install_libdir="$1" - - oldlibs= - if test -z "$rpath"; then -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - # Building a libtool convenience library. -- # Some compilers have problems with a '.al' extension so -+ # Some compilers have problems with a `.al' extension so - # convenience libraries should have the same extension an - # archive normally would. - oldlibs="$output_objdir/$libname.$libext $oldlibs" -@@ -8772,20 +7298,20 @@ - fi - - test -n "$vinfo" && \ -- func_warning "'-version-info/-version-number' is ignored for convenience libraries" -+ func_warning "\`-version-info/-version-number' is ignored for convenience libraries" - - test -n "$release" && \ -- func_warning "'-release' is ignored for convenience libraries" -+ func_warning "\`-release' is ignored for convenience libraries" - else - - # Parse the version information argument. -- save_ifs=$IFS; IFS=: -+ save_ifs="$IFS"; IFS=':' - set dummy $vinfo 0 0 0 - shift -- IFS=$save_ifs -+ IFS="$save_ifs" - - test -n "$7" && \ -- func_fatal_help "too many parameters to '-version-info'" -+ func_fatal_help "too many parameters to \`-version-info'" - - # convert absolute version numbers to libtool ages - # this retains compatibility with .la files and attempts -@@ -8793,42 +7319,42 @@ - - case $vinfo_number in - yes) -- number_major=$1 -- number_minor=$2 -- number_revision=$3 -+ number_major="$1" -+ number_minor="$2" -+ number_revision="$3" - # - # There are really only two kinds -- those that - # use the current revision as the major version - # and those that subtract age and use age as - # a minor version. 
But, then there is irix -- # that has an extra 1 added just for fun -+ # which has an extra 1 added just for fun - # - case $version_type in - # correct linux to gnu/linux during the next big refactor -- darwin|freebsd-elf|linux|osf|windows|none) -+ darwin|linux|osf|windows|none) - func_arith $number_major + $number_minor - current=$func_arith_result -- age=$number_minor -- revision=$number_revision -+ age="$number_minor" -+ revision="$number_revision" - ;; -- freebsd-aout|qnx|sunos) -- current=$number_major -- revision=$number_minor -- age=0 -+ freebsd-aout|freebsd-elf|qnx|sunos) -+ current="$number_major" -+ revision="$number_minor" -+ age="0" - ;; - irix|nonstopux) - func_arith $number_major + $number_minor - current=$func_arith_result -- age=$number_minor -- revision=$number_minor -+ age="$number_minor" -+ revision="$number_minor" - lt_irix_increment=no - ;; - esac - ;; - no) -- current=$1 -- revision=$2 -- age=$3 -+ current="$1" -+ revision="$2" -+ age="$3" - ;; - esac - -@@ -8836,30 +7362,30 @@ - case $current in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) -- func_error "CURRENT '$current' must be a nonnegative integer" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "CURRENT \`$current' must be a nonnegative integer" -+ func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - case $revision in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) -- func_error "REVISION '$revision' must be a nonnegative integer" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "REVISION \`$revision' must be a nonnegative integer" -+ func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - case $age in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) -- func_error "AGE '$age' must be a nonnegative integer" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "AGE \`$age' must be a nonnegative integer" -+ func_fatal_error "\`$vinfo' is not valid version information" - ;; - esac - - if test "$age" -gt "$current"; then -- func_error "AGE '$age' is greater than the current interface number '$current'" -- func_fatal_error "'$vinfo' is not valid version information" -+ func_error "AGE \`$age' is greater than the current interface number \`$current'" -+ func_fatal_error "\`$vinfo' is not valid version information" - fi - - # Calculate the version variables. -@@ -8874,36 +7400,26 @@ - # verstring for coding it into the library header - func_arith $current - $age - major=.$func_arith_result -- versuffix=$major.$age.$revision -+ versuffix="$major.$age.$revision" - # Darwin ld doesn't like 0 for these options... 
- func_arith $current + 1 - minor_current=$func_arith_result -- xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" -+ xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision" - verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" -- # On Darwin other compilers -- case $CC in -- nagfor*) -- verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" -- ;; -- *) -- verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" -- ;; -- esac - ;; - - freebsd-aout) -- major=.$current -- versuffix=.$current.$revision -+ major=".$current" -+ versuffix=".$current.$revision"; - ;; - - freebsd-elf) -- func_arith $current - $age -- major=.$func_arith_result -- versuffix=$major.$age.$revision -+ major=".$current" -+ versuffix=".$current" - ;; - - irix | nonstopux) -- if test no = "$lt_irix_increment"; then -+ if test "X$lt_irix_increment" = "Xno"; then - func_arith $current - $age - else - func_arith $current - $age + 1 -@@ -8914,74 +7430,69 @@ - nonstopux) verstring_prefix=nonstopux ;; - *) verstring_prefix=sgi ;; - esac -- verstring=$verstring_prefix$major.$revision -+ verstring="$verstring_prefix$major.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$revision -- while test 0 -ne "$loop"; do -+ while test "$loop" -ne 0; do - func_arith $revision - $loop - iface=$func_arith_result - func_arith $loop - 1 - loop=$func_arith_result -- verstring=$verstring_prefix$major.$iface:$verstring -+ verstring="$verstring_prefix$major.$iface:$verstring" - done - -- # Before this point, $major must not contain '.'. -+ # Before this point, $major must not contain `.'. - major=.$major -- versuffix=$major.$revision -+ versuffix="$major.$revision" - ;; - - linux) # correct to gnu/linux during the next big refactor - func_arith $current - $age - major=.$func_arith_result -- versuffix=$major.$age.$revision -+ versuffix="$major.$age.$revision" - ;; - - osf) - func_arith $current - $age - major=.$func_arith_result -- versuffix=.$current.$age.$revision -- verstring=$current.$age.$revision -+ versuffix=".$current.$age.$revision" -+ verstring="$current.$age.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$age -- while test 0 -ne "$loop"; do -+ while test "$loop" -ne 0; do - func_arith $current - $loop - iface=$func_arith_result - func_arith $loop - 1 - loop=$func_arith_result -- verstring=$verstring:$iface.0 -+ verstring="$verstring:${iface}.0" - done - - # Make executables depend on our current version. -- func_append verstring ":$current.0" -+ func_append verstring ":${current}.0" - ;; - - qnx) -- major=.$current -- versuffix=.$current -- ;; -- -- sco) -- major=.$current -- versuffix=.$current -+ major=".$current" -+ versuffix=".$current" - ;; - - sunos) -- major=.$current -- versuffix=.$current.$revision -+ major=".$current" -+ versuffix=".$current.$revision" - ;; - - windows) - # Use '-' rather than '.', since we only want one -- # extension on DOS 8.3 file systems. -+ # extension on DOS 8.3 filesystems. 
- func_arith $current - $age - major=$func_arith_result -- versuffix=-$major -+ versuffix="-$major" - ;; - - *) -- func_fatal_configuration "unknown library version type '$version_type'" -+ func_fatal_configuration "unknown library version type \`$version_type'" - ;; - esac - -@@ -8995,45 +7506,42 @@ - verstring= - ;; - *) -- verstring=0.0 -+ verstring="0.0" - ;; - esac -- if test no = "$need_version"; then -+ if test "$need_version" = no; then - versuffix= - else -- versuffix=.0.0 -+ versuffix=".0.0" - fi - fi - - # Remove version info from name if versioning should be avoided -- if test yes,no = "$avoid_version,$need_version"; then -+ if test "$avoid_version" = yes && test "$need_version" = no; then - major= - versuffix= -- verstring= -+ verstring="" - fi - - # Check to see if the archive will have undefined symbols. -- if test yes = "$allow_undefined"; then -- if test unsupported = "$allow_undefined_flag"; then -- if test yes = "$build_old_libs"; then -- func_warning "undefined symbols not allowed in $host shared libraries; building static only" -- build_libtool_libs=no -- else -- func_fatal_error "can't build $host shared library unless -no-undefined is specified" -- fi -+ if test "$allow_undefined" = yes; then -+ if test "$allow_undefined_flag" = unsupported; then -+ func_warning "undefined symbols not allowed in $host shared libraries" -+ build_libtool_libs=no -+ build_old_libs=yes - fi - else - # Don't allow undefined symbols. -- allow_undefined_flag=$no_undefined_flag -+ allow_undefined_flag="$no_undefined_flag" - fi - - fi - -- func_generate_dlsyms "$libname" "$libname" : -+ func_generate_dlsyms "$libname" "$libname" "yes" - func_append libobjs " $symfileobj" -- test " " = "$libobjs" && libobjs= -+ test "X$libobjs" = "X " && libobjs= - -- if test relink != "$opt_mode"; then -+ if test "$opt_mode" != relink; then - # Remove our outputs, but don't remove object files since they - # may have been created when compiling PIC objects. - removelist= -@@ -9042,8 +7550,8 @@ - case $p in - *.$objext | *.gcno) - ;; -- $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/$libname$release.*) -- if test -n "$precious_files_regex"; then -+ $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) -+ if test "X$precious_files_regex" != "X"; then - if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 - then - continue -@@ -9059,11 +7567,11 @@ - fi - - # Now set the variables for building old libraries. -- if test yes = "$build_old_libs" && test convenience != "$build_libtool_libs"; then -+ if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then - func_append oldlibs " $output_objdir/$libname.$libext" - - # Transform .lo files to .o files. -- oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; $lo2o" | $NL2SP` -+ oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP` - fi - - # Eliminate all temporary directories. 
-@@ -9084,13 +7592,13 @@ - *) func_append finalize_rpath " $libdir" ;; - esac - done -- if test yes != "$hardcode_into_libs" || test yes = "$build_old_libs"; then -+ if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then - dependency_libs="$temp_xrpath $dependency_libs" - fi - fi - - # Make sure dlfiles contains only unique files that won't be dlpreopened -- old_dlfiles=$dlfiles -+ old_dlfiles="$dlfiles" - dlfiles= - for lib in $old_dlfiles; do - case " $dlprefiles $dlfiles " in -@@ -9100,7 +7608,7 @@ - done - - # Make sure dlprefiles contains only unique files -- old_dlprefiles=$dlprefiles -+ old_dlprefiles="$dlprefiles" - dlprefiles= - for lib in $old_dlprefiles; do - case "$dlprefiles " in -@@ -9109,7 +7617,7 @@ - esac - done - -- if test yes = "$build_libtool_libs"; then -+ if test "$build_libtool_libs" = yes; then - if test -n "$rpath"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*) -@@ -9133,7 +7641,7 @@ - ;; - *) - # Add libc to deplibs on all other systems if necessary. -- if test yes = "$build_libtool_need_lc"; then -+ if test "$build_libtool_need_lc" = "yes"; then - func_append deplibs " -lc" - fi - ;; -@@ -9149,9 +7657,9 @@ - # I'm not sure if I'm treating the release correctly. I think - # release should show up in the -l (ie -lgmp5) so we don't want to - # add it in twice. Is that correct? -- release= -- versuffix= -- major= -+ release="" -+ versuffix="" -+ major="" - newdeplibs= - droppeddeps=no - case $deplibs_check_method in -@@ -9180,20 +7688,20 @@ - -l*) - func_stripname -l '' "$i" - name=$func_stripname_result -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - func_append newdeplibs " $i" -- i= -+ i="" - ;; - esac - fi -- if test -n "$i"; then -+ if test -n "$i" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 -- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0; then -+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - func_append newdeplibs " $i" - else - droppeddeps=yes -@@ -9223,20 +7731,20 @@ - $opt_dry_run || $RM conftest - if $LTCC $LTCFLAGS -o conftest conftest.c $i; then - ldd_output=`ldd conftest` -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - func_append newdeplibs " $i" -- i= -+ i="" - ;; - esac - fi -- if test -n "$i"; then -+ if test -n "$i" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 -- if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0; then -+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - func_append newdeplibs " $i" - else - droppeddeps=yes -@@ -9273,24 +7781,24 @@ - -l*) - func_stripname -l '' "$a_deplib" - name=$func_stripname_result -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - ;; - esac - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - if test -n 
"$file_magic_glob"; then - libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob` - else - libnameglob=$libname - fi -- test yes = "$want_nocaseglob" && nocaseglob=`shopt -p nocaseglob` -+ test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do -- if test yes = "$want_nocaseglob"; then -+ if test "$want_nocaseglob" = yes; then - shopt -s nocaseglob - potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` - $nocaseglob -@@ -9308,25 +7816,25 @@ - # We might still enter an endless loop, since a link - # loop can be closed while we follow links, - # but so what? -- potlib=$potent_lib -+ potlib="$potent_lib" - while test -h "$potlib" 2>/dev/null; do -- potliblink=`ls -ld $potlib | $SED 's/.* -> //'` -+ potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` - case $potliblink in -- [\\/]* | [A-Za-z]:[\\/]*) potlib=$potliblink;; -- *) potlib=`$ECHO "$potlib" | $SED 's|[^/]*$||'`"$potliblink";; -+ [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; -+ *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";; - esac - done - if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | - $SED -e 10q | - $EGREP "$file_magic_regex" > /dev/null; then - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - break 2 - fi - done - done - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - droppeddeps=yes - echo - $ECHO "*** Warning: linker path does not have real file for library $a_deplib." -@@ -9334,7 +7842,7 @@ - echo "*** you link to this library. But I can only do this if you have a" - echo "*** shared version of the library, which you do not appear to have" - echo "*** because I did check the linker path looking for a file starting" -- if test -z "$potlib"; then -+ if test -z "$potlib" ; then - $ECHO "*** with $libname but no candidates were found. (...for file magic test)" - else - $ECHO "*** with $libname and none of the candidates passed a file format test" -@@ -9357,30 +7865,30 @@ - -l*) - func_stripname -l '' "$a_deplib" - name=$func_stripname_result -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - ;; - esac - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - libname=`eval "\\$ECHO \"$libname_spec\""` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do - potential_libs=`ls $i/$libname[.-]* 2>/dev/null` - for potent_lib in $potential_libs; do -- potlib=$potent_lib # see symlink-check above in file_magic test -+ potlib="$potent_lib" # see symlink-check above in file_magic test - if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ - $EGREP "$match_pattern_regex" > /dev/null; then - func_append newdeplibs " $a_deplib" -- a_deplib= -+ a_deplib="" - break 2 - fi - done - done - fi -- if test -n "$a_deplib"; then -+ if test -n "$a_deplib" ; then - droppeddeps=yes - echo - $ECHO "*** Warning: linker path does not have real file for library $a_deplib." -@@ -9388,7 +7896,7 @@ - echo "*** you link to this library. But I can only do this if you have a" - echo "*** shared version of the library, which you do not appear to have" - echo "*** because I did check the linker path looking for a file starting" -- if test -z "$potlib"; then -+ if test -z "$potlib" ; then - $ECHO "*** with $libname but no candidates were found. 
(...for regex pattern test)" - else - $ECHO "*** with $libname and none of the candidates passed a file format test" -@@ -9404,18 +7912,18 @@ - done # Gone through all deplibs. - ;; - none | unknown | *) -- newdeplibs= -+ newdeplibs="" - tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'` -- if test yes = "$allow_libtool_libs_with_static_runtimes"; then -- for i in $predeps $postdeps; do -+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then -+ for i in $predeps $postdeps ; do - # can't use Xsed below, because $i might contain '/' -- tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s|$i||"` -+ tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"` - done - fi - case $tmp_deplibs in - *[!\ \ ]*) - echo -- if test none = "$deplibs_check_method"; then -+ if test "X$deplibs_check_method" = "Xnone"; then - echo "*** Warning: inter-library dependencies are not supported in this platform." - else - echo "*** Warning: inter-library dependencies are not known to be supported." -@@ -9439,8 +7947,8 @@ - ;; - esac - -- if test yes = "$droppeddeps"; then -- if test yes = "$module"; then -+ if test "$droppeddeps" = yes; then -+ if test "$module" = yes; then - echo - echo "*** Warning: libtool could not satisfy all declared inter-library" - $ECHO "*** dependencies of module $libname. Therefore, libtool will create" -@@ -9449,12 +7957,12 @@ - if test -z "$global_symbol_pipe"; then - echo - echo "*** However, this would only work if libtool was able to extract symbol" -- echo "*** lists from a program, using 'nm' or equivalent, but libtool could" -+ echo "*** lists from a program, using \`nm' or equivalent, but libtool could" - echo "*** not find such a program. So, this module is probably useless." -- echo "*** 'nm' from GNU binutils and a full rebuild may help." -+ echo "*** \`nm' from GNU binutils and a full rebuild may help." - fi -- if test no = "$build_old_libs"; then -- oldlibs=$output_objdir/$libname.$libext -+ if test "$build_old_libs" = no; then -+ oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else -@@ -9465,14 +7973,14 @@ - echo "*** automatically added whenever a program is linked with this library" - echo "*** or is declared to -dlopen it." - -- if test no = "$allow_undefined"; then -+ if test "$allow_undefined" = no; then - echo - echo "*** Since this library must not contain undefined symbols," - echo "*** because either the platform does not support them or" - echo "*** it was explicitly requested with -no-undefined," - echo "*** libtool will only create a static version of it." -- if test no = "$build_old_libs"; then -- oldlibs=$output_objdir/$libname.$libext -+ if test "$build_old_libs" = no; then -+ oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else -@@ -9518,7 +8026,7 @@ - *) func_append new_libs " $deplib" ;; - esac - done -- deplibs=$new_libs -+ deplibs="$new_libs" - - # All the library-specific variables (install_libdir is set above). - library_names= -@@ -9526,25 +8034,25 @@ - dlname= - - # Test again, we may have decided not to build it any more -- if test yes = "$build_libtool_libs"; then -- # Remove $wl instances when linking with ld. -+ if test "$build_libtool_libs" = yes; then -+ # Remove ${wl} instances when linking with ld. - # FIXME: should test the right _cmds variable. 
- case $archive_cmds in - *\$LD\ *) wl= ;; - esac -- if test yes = "$hardcode_into_libs"; then -+ if test "$hardcode_into_libs" = yes; then - # Hardcode the library paths - hardcode_libdirs= - dep_rpath= -- rpath=$finalize_rpath -- test relink = "$opt_mode" || rpath=$compile_rpath$rpath -+ rpath="$finalize_rpath" -+ test "$opt_mode" != relink && rpath="$compile_rpath$rpath" - for libdir in $rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - func_replace_sysroot "$libdir" - libdir=$func_replace_sysroot_result - if test -z "$hardcode_libdirs"; then -- hardcode_libdirs=$libdir -+ hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in -@@ -9569,7 +8077,7 @@ - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then -- libdir=$hardcode_libdirs -+ libdir="$hardcode_libdirs" - eval "dep_rpath=\"$hardcode_libdir_flag_spec\"" - fi - if test -n "$runpath_var" && test -n "$perm_rpath"; then -@@ -9583,8 +8091,8 @@ - test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" - fi - -- shlibpath=$finalize_shlibpath -- test relink = "$opt_mode" || shlibpath=$compile_shlibpath$shlibpath -+ shlibpath="$finalize_shlibpath" -+ test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath" - if test -n "$shlibpath"; then - eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" - fi -@@ -9594,19 +8102,19 @@ - eval library_names=\"$library_names_spec\" - set dummy $library_names - shift -- realname=$1 -+ realname="$1" - shift - - if test -n "$soname_spec"; then - eval soname=\"$soname_spec\" - else -- soname=$realname -+ soname="$realname" - fi - if test -z "$dlname"; then - dlname=$soname - fi - -- lib=$output_objdir/$realname -+ lib="$output_objdir/$realname" - linknames= - for link - do -@@ -9620,7 +8128,7 @@ - delfiles= - if test -n "$export_symbols" && test -n "$include_expsyms"; then - $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" -- export_symbols=$output_objdir/$libname.uexp -+ export_symbols="$output_objdir/$libname.uexp" - func_append delfiles " $export_symbols" - fi - -@@ -9629,31 +8137,31 @@ - cygwin* | mingw* | cegcc*) - if test -n "$export_symbols" && test -z "$export_symbols_regex"; then - # exporting using user supplied symfile -- func_dll_def_p "$export_symbols" || { -+ if test "x`$SED 1q $export_symbols`" != xEXPORTS; then - # and it's NOT already a .def file. Must figure out - # which of the given symbols are data symbols and tag - # them as such. So, trigger use of export_symbols_cmds. - # export_symbols gets reassigned inside the "prepare - # the list of exported symbols" if statement, so the - # include_expsyms logic still works. 
-- orig_export_symbols=$export_symbols -+ orig_export_symbols="$export_symbols" - export_symbols= - always_export_symbols=yes -- } -+ fi - fi - ;; - esac - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then -- if test yes = "$always_export_symbols" || test -n "$export_symbols_regex"; then -- func_verbose "generating symbol list for '$libname.la'" -- export_symbols=$output_objdir/$libname.exp -+ if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then -+ func_verbose "generating symbol list for \`$libname.la'" -+ export_symbols="$output_objdir/$libname.exp" - $opt_dry_run || $RM $export_symbols - cmds=$export_symbols_cmds -- save_ifs=$IFS; IFS='~' -+ save_ifs="$IFS"; IFS='~' - for cmd1 in $cmds; do -- IFS=$save_ifs -+ IFS="$save_ifs" - # Take the normal branch if the nm_file_list_spec branch - # doesn't work or if tool conversion is not needed. - case $nm_file_list_spec~$to_tool_file_cmd in -@@ -9667,7 +8175,7 @@ - try_normal_branch=no - ;; - esac -- if test yes = "$try_normal_branch" \ -+ if test "$try_normal_branch" = yes \ - && { test "$len" -lt "$max_cmd_len" \ - || test "$max_cmd_len" -le -1; } - then -@@ -9678,7 +8186,7 @@ - output_la=$func_basename_result - save_libobjs=$libobjs - save_output=$output -- output=$output_objdir/$output_la.nm -+ output=${output_objdir}/${output_la}.nm - func_to_tool_file "$output" - libobjs=$nm_file_list_spec$func_to_tool_file_result - func_append delfiles " $output" -@@ -9701,8 +8209,8 @@ - break - fi - done -- IFS=$save_ifs -- if test -n "$export_symbols_regex" && test : != "$skipped_export"; then -+ IFS="$save_ifs" -+ if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then - func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' - func_show_eval '$MV "${export_symbols}T" "$export_symbols"' - fi -@@ -9710,16 +8218,16 @@ - fi - - if test -n "$export_symbols" && test -n "$include_expsyms"; then -- tmp_export_symbols=$export_symbols -- test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols -+ tmp_export_symbols="$export_symbols" -+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" - $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' - fi - -- if test : != "$skipped_export" && test -n "$orig_export_symbols"; then -+ if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then - # The given exports_symbols file has to be filtered, so filter it. -- func_verbose "filter symbol list for '$libname.la' to tag DATA exports" -+ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" - # FIXME: $output_objdir/$libname.filter potentially contains lots of -- # 's' commands, which not all seds can handle. GNU sed should be fine -+ # 's' commands which not all seds can handle. GNU sed should be fine - # though. Also, the filter scales superlinearly with the number of - # global variables. join(1) would be nice here, but unfortunately - # isn't a blessed tool. -@@ -9738,11 +8246,11 @@ - ;; - esac - done -- deplibs=$tmp_deplibs -+ deplibs="$tmp_deplibs" - - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec" && -- test yes = "$compiler_needs_object" && -+ test "$compiler_needs_object" = yes && - test -z "$libobjs"; then - # extract the archives, so we have objects to list. - # TODO: could optimize this to just extract one archive. 
-@@ -9753,7 +8261,7 @@ - eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - test "X$libobjs" = "X " && libobjs= - else -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $convenience -@@ -9762,18 +8270,18 @@ - fi - fi - -- if test yes = "$thread_safe" && test -n "$thread_safe_flag_spec"; then -+ if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then - eval flag=\"$thread_safe_flag_spec\" - func_append linker_flags " $flag" - fi - - # Make a backup of the uninstalled library when relinking -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? - fi - - # Do each of the archive commands. -- if test yes = "$module" && test -n "$module_cmds"; then -+ if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - eval test_cmds=\"$module_expsym_cmds\" - cmds=$module_expsym_cmds -@@ -9791,7 +8299,7 @@ - fi - fi - -- if test : != "$skipped_export" && -+ if test "X$skipped_export" != "X:" && - func_len " $test_cmds" && - len=$func_len_result && - test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then -@@ -9824,8 +8332,8 @@ - last_robj= - k=1 - -- if test -n "$save_libobjs" && test : != "$skipped_export" && test yes = "$with_gnu_ld"; then -- output=$output_objdir/$output_la.lnkscript -+ if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then -+ output=${output_objdir}/${output_la}.lnkscript - func_verbose "creating GNU ld script: $output" - echo 'INPUT (' > $output - for obj in $save_libobjs -@@ -9837,14 +8345,14 @@ - func_append delfiles " $output" - func_to_tool_file "$output" - output=$func_to_tool_file_result -- elif test -n "$save_libobjs" && test : != "$skipped_export" && test -n "$file_list_spec"; then -- output=$output_objdir/$output_la.lnk -+ elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then -+ output=${output_objdir}/${output_la}.lnk - func_verbose "creating linker input file list: $output" - : > $output - set x $save_libobjs - shift - firstobj= -- if test yes = "$compiler_needs_object"; then -+ if test "$compiler_needs_object" = yes; then - firstobj="$1 " - shift - fi -@@ -9859,7 +8367,7 @@ - else - if test -n "$save_libobjs"; then - func_verbose "creating reloadable object files..." -- output=$output_objdir/$output_la-$k.$objext -+ output=$output_objdir/$output_la-${k}.$objext - eval test_cmds=\"$reload_cmds\" - func_len " $test_cmds" - len0=$func_len_result -@@ -9871,13 +8379,13 @@ - func_len " $obj" - func_arith $len + $func_len_result - len=$func_arith_result -- if test -z "$objlist" || -+ if test "X$objlist" = X || - test "$len" -lt "$max_cmd_len"; then - func_append objlist " $obj" - else - # The command $test_cmds is almost too long, add a - # command to the queue. -- if test 1 -eq "$k"; then -+ if test "$k" -eq 1 ; then - # The first file doesn't have a previous command to add. 
- reload_objs=$objlist - eval concat_cmds=\"$reload_cmds\" -@@ -9887,10 +8395,10 @@ - reload_objs="$objlist $last_robj" - eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\" - fi -- last_robj=$output_objdir/$output_la-$k.$objext -+ last_robj=$output_objdir/$output_la-${k}.$objext - func_arith $k + 1 - k=$func_arith_result -- output=$output_objdir/$output_la-$k.$objext -+ output=$output_objdir/$output_la-${k}.$objext - objlist=" $obj" - func_len " $last_robj" - func_arith $len0 + $func_len_result -@@ -9902,9 +8410,9 @@ - # files will link in the last one created. - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - reload_objs="$objlist $last_robj" -- eval concat_cmds=\"\$concat_cmds$reload_cmds\" -+ eval concat_cmds=\"\${concat_cmds}$reload_cmds\" - if test -n "$last_robj"; then -- eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" -+ eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" - fi - func_append delfiles " $output" - -@@ -9912,9 +8420,9 @@ - output= - fi - -- ${skipped_export-false} && { -- func_verbose "generating symbol list for '$libname.la'" -- export_symbols=$output_objdir/$libname.exp -+ if ${skipped_export-false}; then -+ func_verbose "generating symbol list for \`$libname.la'" -+ export_symbols="$output_objdir/$libname.exp" - $opt_dry_run || $RM $export_symbols - libobjs=$output - # Append the command to create the export file. -@@ -9923,16 +8431,16 @@ - if test -n "$last_robj"; then - eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" - fi -- } -+ fi - - test -n "$save_libobjs" && - func_verbose "creating a temporary reloadable object file: $output" - - # Loop through the commands generated above and execute them. -- save_ifs=$IFS; IFS='~' -+ save_ifs="$IFS"; IFS='~' - for cmd in $concat_cmds; do -- IFS=$save_ifs -- $opt_quiet || { -+ IFS="$save_ifs" -+ $opt_silent || { - func_quote_for_expand "$cmd" - eval "func_echo $func_quote_for_expand_result" - } -@@ -9940,7 +8448,7 @@ - lt_exit=$? - - # Restore the uninstalled library and exit -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) -@@ -9949,7 +8457,7 @@ - exit $lt_exit - } - done -- IFS=$save_ifs -+ IFS="$save_ifs" - - if test -n "$export_symbols_regex" && ${skipped_export-false}; then - func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' -@@ -9957,18 +8465,18 @@ - fi - fi - -- ${skipped_export-false} && { -+ if ${skipped_export-false}; then - if test -n "$export_symbols" && test -n "$include_expsyms"; then -- tmp_export_symbols=$export_symbols -- test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols -+ tmp_export_symbols="$export_symbols" -+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" - $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' - fi - - if test -n "$orig_export_symbols"; then - # The given exports_symbols file has to be filtered, so filter it. -- func_verbose "filter symbol list for '$libname.la' to tag DATA exports" -+ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" - # FIXME: $output_objdir/$libname.filter potentially contains lots of -- # 's' commands, which not all seds can handle. GNU sed should be fine -+ # 's' commands which not all seds can handle. GNU sed should be fine - # though. Also, the filter scales superlinearly with the number of - # global variables. 
join(1) would be nice here, but unfortunately - # isn't a blessed tool. -@@ -9977,7 +8485,7 @@ - export_symbols=$output_objdir/$libname.def - $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols - fi -- } -+ fi - - libobjs=$output - # Restore the value of output. -@@ -9991,7 +8499,7 @@ - # value of $libobjs for piecewise linking. - - # Do each of the archive commands. -- if test yes = "$module" && test -n "$module_cmds"; then -+ if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - cmds=$module_expsym_cmds - else -@@ -10013,7 +8521,7 @@ - - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $dlprefiles -@@ -10021,12 +8529,11 @@ - test "X$libobjs" = "X " && libobjs= - fi - -- save_ifs=$IFS; IFS='~' -+ save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do -- IFS=$sp$nl -+ IFS="$save_ifs" - eval cmd=\"$cmd\" -- IFS=$save_ifs -- $opt_quiet || { -+ $opt_silent || { - func_quote_for_expand "$cmd" - eval "func_echo $func_quote_for_expand_result" - } -@@ -10034,7 +8541,7 @@ - lt_exit=$? - - # Restore the uninstalled library and exit -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) -@@ -10043,10 +8550,10 @@ - exit $lt_exit - } - done -- IFS=$save_ifs -+ IFS="$save_ifs" - - # Restore the uninstalled library and exit -- if test relink = "$opt_mode"; then -+ if test "$opt_mode" = relink; then - $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? - - if test -n "$convenience"; then -@@ -10066,39 +8573,39 @@ - done - - # If -module or -export-dynamic was specified, set the dlname. -- if test yes = "$module" || test yes = "$export_dynamic"; then -+ if test "$module" = yes || test "$export_dynamic" = yes; then - # On all known operating systems, these are identical. 
-- dlname=$soname -+ dlname="$soname" - fi - fi - ;; - - obj) -- if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then -- func_warning "'-dlopen' is ignored for objects" -+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then -+ func_warning "\`-dlopen' is ignored for objects" - fi - - case " $deplibs" in - *\ -l* | *\ -L*) -- func_warning "'-l' and '-L' are ignored for objects" ;; -+ func_warning "\`-l' and \`-L' are ignored for objects" ;; - esac - - test -n "$rpath" && \ -- func_warning "'-rpath' is ignored for objects" -+ func_warning "\`-rpath' is ignored for objects" - - test -n "$xrpath" && \ -- func_warning "'-R' is ignored for objects" -+ func_warning "\`-R' is ignored for objects" - - test -n "$vinfo" && \ -- func_warning "'-version-info' is ignored for objects" -+ func_warning "\`-version-info' is ignored for objects" - - test -n "$release" && \ -- func_warning "'-release' is ignored for objects" -+ func_warning "\`-release' is ignored for objects" - - case $output in - *.lo) - test -n "$objs$old_deplibs" && \ -- func_fatal_error "cannot build library object '$output' from non-libtool objects" -+ func_fatal_error "cannot build library object \`$output' from non-libtool objects" - - libobj=$output - func_lo2o "$libobj" -@@ -10106,7 +8613,7 @@ - ;; - *) - libobj= -- obj=$output -+ obj="$output" - ;; - esac - -@@ -10119,19 +8626,17 @@ - # the extraction. - reload_conv_objs= - gentop= -- # if reload_cmds runs $LD directly, get rid of -Wl from -- # whole_archive_flag_spec and hope we can get by with turning comma -- # into space. -- case $reload_cmds in -- *\$LD[\ \$]*) wl= ;; -- esac -+ # reload_cmds runs $LD directly, so let us get rid of -+ # -Wl from whole_archive_flag_spec and hope we can get by with -+ # turning comma into space.. -+ wl= -+ - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec"; then - eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" -- test -n "$wl" || tmp_whole_archive_flags=`$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` -- reload_conv_objs=$reload_objs\ $tmp_whole_archive_flags -+ reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` - else -- gentop=$output_objdir/${obj}x -+ gentop="$output_objdir/${obj}x" - func_append generated " $gentop" - - func_extract_archives $gentop $convenience -@@ -10140,12 +8645,12 @@ - fi - - # If we're not building shared, we need to use non_pic_objs -- test yes = "$build_libtool_libs" || libobjs=$non_pic_objects -+ test "$build_libtool_libs" != yes && libobjs="$non_pic_objects" - - # Create the old-style object. -- reload_objs=$objs$old_deplibs' '`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; /\.lib$/d; $lo2o" | $NL2SP`' '$reload_conv_objs -+ reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test - -- output=$obj -+ output="$obj" - func_execute_cmds "$reload_cmds" 'exit $?' - - # Exit if we aren't doing a library object file. -@@ -10157,7 +8662,7 @@ - exit $EXIT_SUCCESS - fi - -- test yes = "$build_libtool_libs" || { -+ if test "$build_libtool_libs" != yes; then - if test -n "$gentop"; then - func_show_eval '${RM}r "$gentop"' - fi -@@ -10167,12 +8672,12 @@ - # $show "echo timestamp > $libobj" - # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? 
- exit $EXIT_SUCCESS -- } -+ fi - -- if test -n "$pic_flag" || test default != "$pic_mode"; then -+ if test -n "$pic_flag" || test "$pic_mode" != default; then - # Only do commands if we really have different PIC objects. - reload_objs="$libobjs $reload_conv_objs" -- output=$libobj -+ output="$libobj" - func_execute_cmds "$reload_cmds" 'exit $?' - fi - -@@ -10189,14 +8694,16 @@ - output=$func_stripname_result.exe;; - esac - test -n "$vinfo" && \ -- func_warning "'-version-info' is ignored for programs" -+ func_warning "\`-version-info' is ignored for programs" - - test -n "$release" && \ -- func_warning "'-release' is ignored for programs" -+ func_warning "\`-release' is ignored for programs" - -- $preload \ -- && test unknown,unknown,unknown = "$dlopen_support,$dlopen_self,$dlopen_self_static" \ -- && func_warning "'LT_INIT([dlopen])' not used. Assuming no dlopen support." -+ test "$preload" = yes \ -+ && test "$dlopen_support" = unknown \ -+ && test "$dlopen_self" = unknown \ -+ && test "$dlopen_self_static" = unknown && \ -+ func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support." - - case $host in - *-*-rhapsody* | *-*-darwin1.[012]) -@@ -10210,11 +8717,11 @@ - *-*-darwin*) - # Don't allow lazy linking, it breaks C++ global constructors - # But is supposedly fixed on 10.4 or later (yay!). -- if test CXX = "$tagname"; then -+ if test "$tagname" = CXX ; then - case ${MACOSX_DEPLOYMENT_TARGET-10.0} in - 10.[0123]) -- func_append compile_command " $wl-bind_at_load" -- func_append finalize_command " $wl-bind_at_load" -+ func_append compile_command " ${wl}-bind_at_load" -+ func_append finalize_command " ${wl}-bind_at_load" - ;; - esac - fi -@@ -10250,7 +8757,7 @@ - *) func_append new_libs " $deplib" ;; - esac - done -- compile_deplibs=$new_libs -+ compile_deplibs="$new_libs" - - - func_append compile_command " $compile_deplibs" -@@ -10274,7 +8781,7 @@ - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then -- hardcode_libdirs=$libdir -+ hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in -@@ -10297,7 +8804,7 @@ - fi - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) -- testbindir=`$ECHO "$libdir" | $SED -e 's*/lib$*/bin*'` -+ testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'` - case :$dllsearchpath: in - *":$libdir:"*) ;; - ::) dllsearchpath=$libdir;; -@@ -10314,10 +8821,10 @@ - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then -- libdir=$hardcode_libdirs -+ libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi -- compile_rpath=$rpath -+ compile_rpath="$rpath" - - rpath= - hardcode_libdirs= -@@ -10325,7 +8832,7 @@ - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then -- hardcode_libdirs=$libdir -+ hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in -@@ -10350,43 +8857,45 @@ - # Substitute the hardcoded libdirs into the rpath. 
- if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then -- libdir=$hardcode_libdirs -+ libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi -- finalize_rpath=$rpath -+ finalize_rpath="$rpath" - -- if test -n "$libobjs" && test yes = "$build_old_libs"; then -+ if test -n "$libobjs" && test "$build_old_libs" = yes; then - # Transform all the library objects into standard objects. - compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP` - finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP` - fi - -- func_generate_dlsyms "$outputname" "@PROGRAM@" false -+ func_generate_dlsyms "$outputname" "@PROGRAM@" "no" - - # template prelinking step - if test -n "$prelink_cmds"; then - func_execute_cmds "$prelink_cmds" 'exit $?' - fi - -- wrappers_required=: -+ wrappers_required=yes - case $host in - *cegcc* | *mingw32ce*) - # Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway. -- wrappers_required=false -+ wrappers_required=no - ;; - *cygwin* | *mingw* ) -- test yes = "$build_libtool_libs" || wrappers_required=false -+ if test "$build_libtool_libs" != yes; then -+ wrappers_required=no -+ fi - ;; - *) -- if test no = "$need_relink" || test yes != "$build_libtool_libs"; then -- wrappers_required=false -+ if test "$need_relink" = no || test "$build_libtool_libs" != yes; then -+ wrappers_required=no - fi - ;; - esac -- $wrappers_required || { -+ if test "$wrappers_required" = no; then - # Replace the output file specification. - compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'` -- link_command=$compile_command$compile_rpath -+ link_command="$compile_command$compile_rpath" - - # We have no uninstalled library dependencies, so finalize right now. - exit_status=0 -@@ -10399,12 +8908,12 @@ - fi - - # Delete the generated files. -- if test -f "$output_objdir/${outputname}S.$objext"; then -- func_show_eval '$RM "$output_objdir/${outputname}S.$objext"' -+ if test -f "$output_objdir/${outputname}S.${objext}"; then -+ func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' - fi - - exit $exit_status -- } -+ fi - - if test -n "$compile_shlibpath$finalize_shlibpath"; then - compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" -@@ -10434,9 +8943,9 @@ - fi - fi - -- if test yes = "$no_install"; then -+ if test "$no_install" = yes; then - # We don't need to create a wrapper script. -- link_command=$compile_var$compile_command$compile_rpath -+ link_command="$compile_var$compile_command$compile_rpath" - # Replace the output file specification. - link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'` - # Delete the old output file. 
-@@ -10453,28 +8962,27 @@ - exit $EXIT_SUCCESS - fi - -- case $hardcode_action,$fast_install in -- relink,*) -- # Fast installation is not supported -- link_command=$compile_var$compile_command$compile_rpath -- relink_command=$finalize_var$finalize_command$finalize_rpath -- -- func_warning "this platform does not like uninstalled shared libraries" -- func_warning "'$output' will be relinked during installation" -- ;; -- *,yes) -- link_command=$finalize_var$compile_command$finalize_rpath -- relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'` -- ;; -- *,no) -- link_command=$compile_var$compile_command$compile_rpath -- relink_command=$finalize_var$finalize_command$finalize_rpath -- ;; -- *,needless) -- link_command=$finalize_var$compile_command$finalize_rpath -- relink_command= -- ;; -- esac -+ if test "$hardcode_action" = relink; then -+ # Fast installation is not supported -+ link_command="$compile_var$compile_command$compile_rpath" -+ relink_command="$finalize_var$finalize_command$finalize_rpath" -+ -+ func_warning "this platform does not like uninstalled shared libraries" -+ func_warning "\`$output' will be relinked during installation" -+ else -+ if test "$fast_install" != no; then -+ link_command="$finalize_var$compile_command$finalize_rpath" -+ if test "$fast_install" = yes; then -+ relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'` -+ else -+ # fast_install is set to needless -+ relink_command= -+ fi -+ else -+ link_command="$compile_var$compile_command$compile_rpath" -+ relink_command="$finalize_var$finalize_command$finalize_rpath" -+ fi -+ fi - - # Replace the output file specification. - link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` -@@ -10531,8 +9039,8 @@ - func_dirname_and_basename "$output" "" "." - output_name=$func_basename_result - output_path=$func_dirname_result -- cwrappersource=$output_path/$objdir/lt-$output_name.c -- cwrapper=$output_path/$output_name.exe -+ cwrappersource="$output_path/$objdir/lt-$output_name.c" -+ cwrapper="$output_path/$output_name.exe" - $RM $cwrappersource $cwrapper - trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 - -@@ -10553,7 +9061,7 @@ - trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15 - $opt_dry_run || { - # note: this script will not be executed, so do not chmod. -- if test "x$build" = "x$host"; then -+ if test "x$build" = "x$host" ; then - $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result - else - func_emit_wrapper no > $func_ltwrapper_scriptname_result -@@ -10576,27 +9084,25 @@ - # See if we need to build an old-fashioned archive. 
- for oldlib in $oldlibs; do - -- case $build_libtool_libs in -- convenience) -- oldobjs="$libobjs_save $symfileobj" -- addlibs=$convenience -- build_libtool_libs=no -- ;; -- module) -- oldobjs=$libobjs_save -- addlibs=$old_convenience -+ if test "$build_libtool_libs" = convenience; then -+ oldobjs="$libobjs_save $symfileobj" -+ addlibs="$convenience" -+ build_libtool_libs=no -+ else -+ if test "$build_libtool_libs" = module; then -+ oldobjs="$libobjs_save" - build_libtool_libs=no -- ;; -- *) -+ else - oldobjs="$old_deplibs $non_pic_objects" -- $preload && test -f "$symfileobj" \ -- && func_append oldobjs " $symfileobj" -- addlibs=$old_convenience -- ;; -- esac -+ if test "$preload" = yes && test -f "$symfileobj"; then -+ func_append oldobjs " $symfileobj" -+ fi -+ fi -+ addlibs="$old_convenience" -+ fi - - if test -n "$addlibs"; then -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $addlibs -@@ -10604,13 +9110,13 @@ - fi - - # Do each command in the archive commands. -- if test -n "$old_archive_from_new_cmds" && test yes = "$build_libtool_libs"; then -+ if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then - cmds=$old_archive_from_new_cmds - else - - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - - func_extract_archives $gentop $dlprefiles -@@ -10631,7 +9137,7 @@ - : - else - echo "copying selected object files to avoid basename conflicts..." -- gentop=$output_objdir/${outputname}x -+ gentop="$output_objdir/${outputname}x" - func_append generated " $gentop" - func_mkdir_p "$gentop" - save_oldobjs=$oldobjs -@@ -10640,7 +9146,7 @@ - for obj in $save_oldobjs - do - func_basename "$obj" -- objbase=$func_basename_result -+ objbase="$func_basename_result" - case " $oldobjs " in - " ") oldobjs=$obj ;; - *[\ /]"$objbase "*) -@@ -10709,18 +9215,18 @@ - else - # the above command should be used before it gets too long - oldobjs=$objlist -- if test "$obj" = "$last_oldobj"; then -+ if test "$obj" = "$last_oldobj" ; then - RANLIB=$save_RANLIB - fi - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ -- eval concat_cmds=\"\$concat_cmds$old_archive_cmds\" -+ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" - objlist= - len=$len0 - fi - done - RANLIB=$save_RANLIB - oldobjs=$objlist -- if test -z "$oldobjs"; then -+ if test "X$oldobjs" = "X" ; then - eval cmds=\"\$concat_cmds\" - else - eval cmds=\"\$concat_cmds~\$old_archive_cmds\" -@@ -10737,7 +9243,7 @@ - case $output in - *.la) - old_library= -- test yes = "$build_old_libs" && old_library=$libname.$libext -+ test "$build_old_libs" = yes && old_library="$libname.$libext" - func_verbose "creating $output" - - # Preserve any variables that may affect compiler behavior -@@ -10752,31 +9258,31 @@ - fi - done - # Quote the link command for shipping. -- relink_command="(cd `pwd`; $SHELL \"$progpath\" $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" -+ relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" - relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` -- if test yes = "$hardcode_automatic"; then -+ if test "$hardcode_automatic" = yes ; then - relink_command= - fi - - # Only create the output if not a dry run. 
- $opt_dry_run || { - for installed in no yes; do -- if test yes = "$installed"; then -+ if test "$installed" = yes; then - if test -z "$install_libdir"; then - break - fi -- output=$output_objdir/${outputname}i -+ output="$output_objdir/$outputname"i - # Replace all uninstalled libtool libraries with the installed ones - newdependency_libs= - for deplib in $dependency_libs; do - case $deplib in - *.la) - func_basename "$deplib" -- name=$func_basename_result -+ name="$func_basename_result" - func_resolve_sysroot "$deplib" -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` - test -z "$libdir" && \ -- func_fatal_error "'$deplib' is not a valid libtool archive" -+ func_fatal_error "\`$deplib' is not a valid libtool archive" - func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name" - ;; - -L*) -@@ -10792,23 +9298,23 @@ - *) func_append newdependency_libs " $deplib" ;; - esac - done -- dependency_libs=$newdependency_libs -+ dependency_libs="$newdependency_libs" - newdlfiles= - - for lib in $dlfiles; do - case $lib in - *.la) - func_basename "$lib" -- name=$func_basename_result -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib` -+ name="$func_basename_result" -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - test -z "$libdir" && \ -- func_fatal_error "'$lib' is not a valid libtool archive" -+ func_fatal_error "\`$lib' is not a valid libtool archive" - func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name" - ;; - *) func_append newdlfiles " $lib" ;; - esac - done -- dlfiles=$newdlfiles -+ dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - case $lib in -@@ -10818,34 +9324,34 @@ - # didn't already link the preopened objects directly into - # the library: - func_basename "$lib" -- name=$func_basename_result -- eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib` -+ name="$func_basename_result" -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - test -z "$libdir" && \ -- func_fatal_error "'$lib' is not a valid libtool archive" -+ func_fatal_error "\`$lib' is not a valid libtool archive" - func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name" - ;; - esac - done -- dlprefiles=$newdlprefiles -+ dlprefiles="$newdlprefiles" - else - newdlfiles= - for lib in $dlfiles; do - case $lib in -- [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;; -+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - func_append newdlfiles " $abs" - done -- dlfiles=$newdlfiles -+ dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - case $lib in -- [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;; -+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - func_append newdlprefiles " $abs" - done -- dlprefiles=$newdlprefiles -+ dlprefiles="$newdlprefiles" - fi - $RM $output - # place dlname in correct position for cygwin -@@ -10861,9 +9367,10 @@ - case $host,$output,$installed,$module,$dlname in - *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll) - # If a -bindir argument was supplied, place the dll there. -- if test -n "$bindir"; then -+ if test "x$bindir" != x ; -+ then - func_relative_path "$install_libdir" "$bindir" -- tdlname=$func_relative_path_result/$dlname -+ tdlname=$func_relative_path_result$dlname - else - # Otherwise fall back on heuristic. 
- tdlname=../bin/$dlname -@@ -10872,7 +9379,7 @@ - esac - $ECHO > $output "\ - # $outputname - a libtool library file --# Generated by $PROGRAM (GNU $PACKAGE) $VERSION -+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION - # - # Please DO NOT delete this file! - # It is necessary for linking the library. -@@ -10886,7 +9393,7 @@ - # The name of the static archive. - old_library='$old_library' - --# Linker flags that cannot go in dependency_libs. -+# Linker flags that can not go in dependency_libs. - inherited_linker_flags='$new_inherited_linker_flags' - - # Libraries that this one depends upon. -@@ -10912,7 +9419,7 @@ - - # Directory that this library needs to be installed in: - libdir='$install_libdir'" -- if test no,yes = "$installed,$need_relink"; then -+ if test "$installed" = no && test "$need_relink" = yes; then - $ECHO >> $output "\ - relink_command=\"$relink_command\"" - fi -@@ -10927,29 +9434,27 @@ - exit $EXIT_SUCCESS - } - --if test link = "$opt_mode" || test relink = "$opt_mode"; then -- func_mode_link ${1+"$@"} --fi -+{ test "$opt_mode" = link || test "$opt_mode" = relink; } && -+ func_mode_link ${1+"$@"} - - - # func_mode_uninstall arg... - func_mode_uninstall () - { -- $debug_cmd -- -- RM=$nonopt -+ $opt_debug -+ RM="$nonopt" - files= -- rmforce=false -+ rmforce= - exit_status=0 - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. -- libtool_install_magic=$magic -+ libtool_install_magic="$magic" - - for arg - do - case $arg in -- -f) func_append RM " $arg"; rmforce=: ;; -+ -f) func_append RM " $arg"; rmforce=yes ;; - -*) func_append RM " $arg" ;; - *) func_append files " $arg" ;; - esac -@@ -10962,18 +9467,18 @@ - - for file in $files; do - func_dirname "$file" "" "." -- dir=$func_dirname_result -- if test . = "$dir"; then -- odir=$objdir -+ dir="$func_dirname_result" -+ if test "X$dir" = X.; then -+ odir="$objdir" - else -- odir=$dir/$objdir -+ odir="$dir/$objdir" - fi - func_basename "$file" -- name=$func_basename_result -- test uninstall = "$opt_mode" && odir=$dir -+ name="$func_basename_result" -+ test "$opt_mode" = uninstall && odir="$dir" - - # Remember odir for removal later, being careful to avoid duplicates -- if test clean = "$opt_mode"; then -+ if test "$opt_mode" = clean; then - case " $rmdirs " in - *" $odir "*) ;; - *) func_append rmdirs " $odir" ;; -@@ -10988,11 +9493,11 @@ - elif test -d "$file"; then - exit_status=1 - continue -- elif $rmforce; then -+ elif test "$rmforce" = yes; then - continue - fi - -- rmfiles=$file -+ rmfiles="$file" - - case $name in - *.la) -@@ -11006,7 +9511,7 @@ - done - test -n "$old_library" && func_append rmfiles " $odir/$old_library" - -- case $opt_mode in -+ case "$opt_mode" in - clean) - case " $library_names " in - *" $dlname "*) ;; -@@ -11017,12 +9522,12 @@ - uninstall) - if test -n "$library_names"; then - # Do each command in the postuninstall commands. -- func_execute_cmds "$postuninstall_cmds" '$rmforce || exit_status=1' -+ func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' - fi - - if test -n "$old_library"; then - # Do each command in the old_postuninstall commands. -- func_execute_cmds "$old_postuninstall_cmds" '$rmforce || exit_status=1' -+ func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' - fi - # FIXME: should reinstall the best remaining shared library. - ;; -@@ -11038,19 +9543,21 @@ - func_source $dir/$name - - # Add PIC object to the list of files to remove. 
-- if test -n "$pic_object" && test none != "$pic_object"; then -+ if test -n "$pic_object" && -+ test "$pic_object" != none; then - func_append rmfiles " $dir/$pic_object" - fi - - # Add non-PIC object to the list of files to remove. -- if test -n "$non_pic_object" && test none != "$non_pic_object"; then -+ if test -n "$non_pic_object" && -+ test "$non_pic_object" != none; then - func_append rmfiles " $dir/$non_pic_object" - fi - fi - ;; - - *) -- if test clean = "$opt_mode"; then -+ if test "$opt_mode" = clean ; then - noexename=$name - case $file in - *.exe) -@@ -11077,12 +9584,12 @@ - - # note $name still contains .exe if it was in $file originally - # as does the version of $file that was added into $rmfiles -- func_append rmfiles " $odir/$name $odir/${name}S.$objext" -- if test yes = "$fast_install" && test -n "$relink_command"; then -+ func_append rmfiles " $odir/$name $odir/${name}S.${objext}" -+ if test "$fast_install" = yes && test -n "$relink_command"; then - func_append rmfiles " $odir/lt-$name" - fi -- if test "X$noexename" != "X$name"; then -- func_append rmfiles " $odir/lt-$noexename.c" -+ if test "X$noexename" != "X$name" ; then -+ func_append rmfiles " $odir/lt-${noexename}.c" - fi - fi - fi -@@ -11091,7 +9598,7 @@ - func_show_eval "$RM $rmfiles" 'exit_status=1' - done - -- # Try to remove the $objdir's in the directories where we deleted files -+ # Try to remove the ${objdir}s in the directories where we deleted files - for dir in $rmdirs; do - if test -d "$dir"; then - func_show_eval "rmdir $dir >/dev/null 2>&1" -@@ -11101,17 +9608,16 @@ - exit $exit_status - } - --if test uninstall = "$opt_mode" || test clean = "$opt_mode"; then -- func_mode_uninstall ${1+"$@"} --fi -+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } && -+ func_mode_uninstall ${1+"$@"} - - test -z "$opt_mode" && { -- help=$generic_help -+ help="$generic_help" - func_fatal_help "you must specify a MODE" - } - - test -z "$exec_cmd" && \ -- func_fatal_help "invalid operation mode '$opt_mode'" -+ func_fatal_help "invalid operation mode \`$opt_mode'" - - if test -n "$exec_cmd"; then - eval exec "$exec_cmd" -@@ -11122,7 +9628,7 @@ - - - # The TAGs below are defined such that we never get into a situation --# where we disable both kinds of libraries. Given conflicting -+# in which we disable both kinds of libraries. Given conflicting - # choices, we go for a static library, that is the most portable, - # since we can't tell whether shared libraries were disabled because - # the user asked for that or because the platform doesn't support -@@ -11145,3 +9651,5 @@ - # mode:shell-script - # sh-indentation:2 - # End: -+# vi:sw=2 -+ -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/lt~obsolete.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/lt~obsolete.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/lt~obsolete.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/lt~obsolete.m4 2020-07-16 10:48:35.467673000 +0200 -@@ -1,7 +1,6 @@ - # lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*- - # --# Copyright (C) 2004-2005, 2007, 2009, 2011-2015 Free Software --# Foundation, Inc. -+# Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc. - # Written by Scott James Remnant, 2004. - # - # This file is free software; the Free Software Foundation gives -@@ -12,7 +11,7 @@ - - # These exist entirely to fool aclocal when bootstrapping libtool. 
- # --# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN), -+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN) - # which have later been changed to m4_define as they aren't part of the - # exported API, or moved to Autoconf or Automake where they belong. - # -@@ -26,7 +25,7 @@ - # included after everything else. This provides aclocal with the - # AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything - # because those macros already exist, or will be overwritten later. --# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. -+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. - # - # Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here. - # Yes, that means every name once taken will need to remain here until -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltoptions.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltoptions.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltoptions.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltoptions.m4 2020-07-16 10:48:35.465838445 +0200 -@@ -1,14 +1,14 @@ - # Helper functions for option handling. -*- Autoconf -*- - # --# Copyright (C) 2004-2005, 2007-2009, 2011-2015 Free Software --# Foundation, Inc. -+# Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, -+# Inc. - # Written by Gary V. Vaughan, 2004 - # - # This file is free software; the Free Software Foundation gives - # unlimited permission to copy and/or distribute it, with or without - # modifications, as long as this notice is preserved. - --# serial 8 ltoptions.m4 -+# serial 7 ltoptions.m4 - - # This is to help aclocal find these macros, as it can't see m4_define. - AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) -@@ -29,7 +29,7 @@ - [m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl - m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]), - _LT_MANGLE_DEFUN([$1], [$2]), -- [m4_warning([Unknown $1 option '$2'])])[]dnl -+ [m4_warning([Unknown $1 option `$2'])])[]dnl - ]) - - -@@ -75,15 +75,13 @@ - dnl - dnl If no reference was made to various pairs of opposing options, then - dnl we run the default mode handler for the pair. 
For example, if neither -- dnl 'shared' nor 'disable-shared' was passed, we enable building of shared -+ dnl `shared' nor `disable-shared' was passed, we enable building of shared - dnl archives by default: - _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED]) - _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC]) - _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC]) - _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install], -- [_LT_ENABLE_FAST_INSTALL]) -- _LT_UNLESS_OPTIONS([LT_INIT], [aix-soname=aix aix-soname=both aix-soname=svr4], -- [_LT_WITH_AIX_SONAME([aix])]) -+ [_LT_ENABLE_FAST_INSTALL]) - ]) - ])# _LT_SET_OPTIONS - -@@ -114,7 +112,7 @@ - [_LT_SET_OPTION([LT_INIT], [dlopen]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you --put the 'dlopen' option into LT_INIT's first parameter.]) -+put the `dlopen' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -@@ -150,7 +148,7 @@ - _LT_SET_OPTION([LT_INIT], [win32-dll]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you --put the 'win32-dll' option into LT_INIT's first parameter.]) -+put the `win32-dll' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -@@ -159,9 +157,9 @@ - - # _LT_ENABLE_SHARED([DEFAULT]) - # ---------------------------- --# implement the --enable-shared flag, and supports the 'shared' and --# 'disable-shared' LT_INIT options. --# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. -+# implement the --enable-shared flag, and supports the `shared' and -+# `disable-shared' LT_INIT options. -+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. - m4_define([_LT_ENABLE_SHARED], - [m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl - AC_ARG_ENABLE([shared], -@@ -174,14 +172,14 @@ - *) - enable_shared=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_shared=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], - [enable_shared=]_LT_ENABLE_SHARED_DEFAULT) -@@ -213,9 +211,9 @@ - - # _LT_ENABLE_STATIC([DEFAULT]) - # ---------------------------- --# implement the --enable-static flag, and support the 'static' and --# 'disable-static' LT_INIT options. --# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. -+# implement the --enable-static flag, and support the `static' and -+# `disable-static' LT_INIT options. -+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. - m4_define([_LT_ENABLE_STATIC], - [m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl - AC_ARG_ENABLE([static], -@@ -228,14 +226,14 @@ - *) - enable_static=no - # Look at the argument we got. We use all the common list separators. 
-- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_static=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], - [enable_static=]_LT_ENABLE_STATIC_DEFAULT) -@@ -267,9 +265,9 @@ - - # _LT_ENABLE_FAST_INSTALL([DEFAULT]) - # ---------------------------------- --# implement the --enable-fast-install flag, and support the 'fast-install' --# and 'disable-fast-install' LT_INIT options. --# DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. -+# implement the --enable-fast-install flag, and support the `fast-install' -+# and `disable-fast-install' LT_INIT options. -+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. - m4_define([_LT_ENABLE_FAST_INSTALL], - [m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl - AC_ARG_ENABLE([fast-install], -@@ -282,14 +280,14 @@ - *) - enable_fast_install=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_fast_install=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], - [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT) -@@ -306,14 +304,14 @@ - [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you put --the 'fast-install' option into LT_INIT's first parameter.]) -+the `fast-install' option into LT_INIT's first parameter.]) - ]) - - AU_DEFUN([AC_DISABLE_FAST_INSTALL], - [_LT_SET_OPTION([LT_INIT], [disable-fast-install]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you put --the 'disable-fast-install' option into LT_INIT's first parameter.]) -+the `disable-fast-install' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -@@ -321,64 +319,11 @@ - dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], []) - - --# _LT_WITH_AIX_SONAME([DEFAULT]) --# ---------------------------------- --# implement the --with-aix-soname flag, and support the `aix-soname=aix' --# and `aix-soname=both' and `aix-soname=svr4' LT_INIT options. DEFAULT --# is either `aix', `both' or `svr4'. If omitted, it defaults to `aix'. 
--m4_define([_LT_WITH_AIX_SONAME], --[m4_define([_LT_WITH_AIX_SONAME_DEFAULT], [m4_if($1, svr4, svr4, m4_if($1, both, both, aix))])dnl --shared_archive_member_spec= --case $host,$enable_shared in --power*-*-aix[[5-9]]*,yes) -- AC_MSG_CHECKING([which variant of shared library versioning to provide]) -- AC_ARG_WITH([aix-soname], -- [AS_HELP_STRING([--with-aix-soname=aix|svr4|both], -- [shared library versioning (aka "SONAME") variant to provide on AIX, @<:@default=]_LT_WITH_AIX_SONAME_DEFAULT[@:>@.])], -- [case $withval in -- aix|svr4|both) -- ;; -- *) -- AC_MSG_ERROR([Unknown argument to --with-aix-soname]) -- ;; -- esac -- lt_cv_with_aix_soname=$with_aix_soname], -- [AC_CACHE_VAL([lt_cv_with_aix_soname], -- [lt_cv_with_aix_soname=]_LT_WITH_AIX_SONAME_DEFAULT) -- with_aix_soname=$lt_cv_with_aix_soname]) -- AC_MSG_RESULT([$with_aix_soname]) -- if test aix != "$with_aix_soname"; then -- # For the AIX way of multilib, we name the shared archive member -- # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o', -- # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File. -- # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag, -- # the AIX toolchain works better with OBJECT_MODE set (default 32). -- if test 64 = "${OBJECT_MODE-32}"; then -- shared_archive_member_spec=shr_64 -- else -- shared_archive_member_spec=shr -- fi -- fi -- ;; --*) -- with_aix_soname=aix -- ;; --esac -- --_LT_DECL([], [shared_archive_member_spec], [0], -- [Shared archive member basename, for filename based shared library versioning on AIX])dnl --])# _LT_WITH_AIX_SONAME -- --LT_OPTION_DEFINE([LT_INIT], [aix-soname=aix], [_LT_WITH_AIX_SONAME([aix])]) --LT_OPTION_DEFINE([LT_INIT], [aix-soname=both], [_LT_WITH_AIX_SONAME([both])]) --LT_OPTION_DEFINE([LT_INIT], [aix-soname=svr4], [_LT_WITH_AIX_SONAME([svr4])]) -- -- - # _LT_WITH_PIC([MODE]) - # -------------------- --# implement the --with-pic flag, and support the 'pic-only' and 'no-pic' -+# implement the --with-pic flag, and support the `pic-only' and `no-pic' - # LT_INIT options. --# MODE is either 'yes' or 'no'. If omitted, it defaults to 'both'. -+# MODE is either `yes' or `no'. If omitted, it defaults to `both'. - m4_define([_LT_WITH_PIC], - [AC_ARG_WITH([pic], - [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@], -@@ -389,17 +334,19 @@ - *) - pic_mode=default - # Look at the argument we got. We use all the common list separators. 
-- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for lt_pkg in $withval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$lt_pkg" = "X$lt_p"; then - pic_mode=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac], -- [pic_mode=m4_default([$1], [default])]) -+ [pic_mode=default]) -+ -+test -z "$pic_mode" && pic_mode=m4_default([$1], [default]) - - _LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl - ])# _LT_WITH_PIC -@@ -412,7 +359,7 @@ - [_LT_SET_OPTION([LT_INIT], [pic-only]) - AC_DIAGNOSE([obsolete], - [$0: Remove this warning and the call to _LT_SET_OPTION when you --put the 'pic-only' option into LT_INIT's first parameter.]) -+put the `pic-only' option into LT_INIT's first parameter.]) - ]) - - dnl aclocal-1.4 backwards compatibility: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltsugar.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltsugar.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltsugar.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltsugar.m4 2020-07-16 10:48:35.466517000 +0200 -@@ -1,7 +1,6 @@ - # ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*- - # --# Copyright (C) 2004-2005, 2007-2008, 2011-2015 Free Software --# Foundation, Inc. -+# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc. - # Written by Gary V. Vaughan, 2004 - # - # This file is free software; the Free Software Foundation gives -@@ -34,7 +33,7 @@ - # ------------ - # Manipulate m4 lists. - # These macros are necessary as long as will still need to support --# Autoconf-2.59, which quotes differently. -+# Autoconf-2.59 which quotes differently. - m4_define([lt_car], [[$1]]) - m4_define([lt_cdr], - [m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])], -@@ -45,7 +44,7 @@ - - # lt_append(MACRO-NAME, STRING, [SEPARATOR]) - # ------------------------------------------ --# Redefine MACRO-NAME to hold its former content plus 'SEPARATOR''STRING'. -+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'. - # Note that neither SEPARATOR nor STRING are expanded; they are appended - # to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked). - # No SEPARATOR is output if MACRO-NAME was previously undefined (different -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltversion.m4 psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltversion.m4 ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/ltversion.m4 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/ltversion.m4 2020-07-16 10:48:35.467101000 +0200 -@@ -1,6 +1,6 @@ - # ltversion.m4 -- version numbers -*- Autoconf -*- - # --# Copyright (C) 2004, 2011-2015 Free Software Foundation, Inc. -+# Copyright (C) 2004 Free Software Foundation, Inc. 
- # Written by Scott James Remnant, 2004 - # - # This file is free software; the Free Software Foundation gives -@@ -9,15 +9,15 @@ - - # @configure_input@ - --# serial 4179 ltversion.m4 -+# serial 3337 ltversion.m4 - # This file is part of GNU Libtool - --m4_define([LT_PACKAGE_VERSION], [2.4.6]) --m4_define([LT_PACKAGE_REVISION], [2.4.6]) -+m4_define([LT_PACKAGE_VERSION], [2.4.2]) -+m4_define([LT_PACKAGE_REVISION], [1.3337]) - - AC_DEFUN([LTVERSION_VERSION], --[macro_version='2.4.6' --macro_revision='2.4.6' -+[macro_version='2.4.2' -+macro_revision='1.3337' - _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) - _LT_DECL(, macro_revision, 0) - ]) -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/missing psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/missing ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/missing 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/missing 2020-07-16 10:48:35.468550000 +0200 -@@ -160,7 +160,7 @@ - ;; - autom4te*) - echo "You might have modified some maintainer files that require" -- echo "the 'autom4te' program to be rebuilt." -+ echo "the 'automa4te' program to be rebuilt." - program_details 'autom4te' - ;; - bison*|yacc*) -@@ -210,6 +210,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/test-driver psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/test-driver ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/confdb/test-driver 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/confdb/test-driver 2020-07-16 10:48:35.469760000 +0200 -@@ -106,14 +106,11 @@ - # Test script is run here. - "$@" >$log_file 2>&1 - estatus=$? -- - if test $enable_hard_errors = no && test $estatus -eq 99; then -- tweaked_estatus=1 --else -- tweaked_estatus=$estatus -+ estatus=1 - fi - --case $tweaked_estatus:$expect_failure in -+case $estatus:$expect_failure in - 0:yes) col=$red res=XPASS recheck=yes gcopy=yes;; - 0:*) col=$grn res=PASS recheck=no gcopy=no;; - 77:*) col=$blu res=SKIP recheck=no gcopy=yes;; -@@ -122,12 +119,6 @@ - *:*) col=$red res=FAIL recheck=yes gcopy=yes;; - esac - --# Report the test outcome and exit status in the logs, so that one can --# know whether the test passed or failed simply by looking at the '.log' --# file, without the need of also peaking into the corresponding '.trs' --# file (automake bug#11814). --echo "$res $test_name (exit status: $estatus)" >>$log_file -- - # Report outcome to console. 
- echo "${col}${res}${std}: $test_name" - -@@ -143,6 +134,6 @@ - # eval: (add-hook 'before-save-hook 'time-stamp) - # time-stamp-start: "scriptversion=" - # time-stamp-format: "%:y-%02m-%02d.%02H" --# time-stamp-time-zone: "UTC0" -+# time-stamp-time-zone: "UTC" - # time-stamp-end: "; # UTC" - # End: -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/configure psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/configure ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/configure 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/configure 2020-07-16 10:48:35.484369056 +0200 -@@ -643,7 +643,6 @@ - MPL_EMBEDDED_MODE_TRUE - libmpl_so_version - MPLLIBNAME --LT_SYS_LIBRARY_PATH - OTOOL64 - OTOOL - LIPO -@@ -765,7 +764,6 @@ - enable_static - with_pic - enable_fast_install --with_aix_soname - with_gnu_ld - with_sysroot - enable_libtool_lock -@@ -792,7 +790,6 @@ - LIBS - CPPFLAGS - CPP --LT_SYS_LIBRARY_PATH - MPLLIBNAME - GCOV' - -@@ -1458,12 +1455,9 @@ - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) - --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use - both] -- --with-aix-soname=aix|svr4|both -- shared library versioning (aka "SONAME") variant to -- provide on AIX, [default=aix]. - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -- --with-sysroot[=DIR] Search for dependent libraries within DIR (or the -- compiler's sysroot if not specified). -+ --with-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). - --without-valgrind to disable valgrind support (such as because of - version issues) - --with-valgrind=PATH use valgrind headers installed in PATH (default is -@@ -1498,8 +1492,6 @@ - CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if - you have headers in a nonstandard directory <include dir> - CPP C preprocessor -- LT_SYS_LIBRARY_PATH -- User-defined run-time library search path. - MPLLIBNAME can be used to override the name of the MPL library (default: - "mpl") - GCOV name/path for the gcov utility -@@ -2589,8 +2581,8 @@ - ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' - program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` - --# Expand $ac_aux_dir to an absolute path. --am_aux_dir=`cd "$ac_aux_dir" && pwd` -+# expand $ac_aux_dir to an absolute path -+am_aux_dir=`cd $ac_aux_dir && pwd` - - if test x"${MISSING+set}" != xset; then - case $am_aux_dir in -@@ -2609,7 +2601,7 @@ - $as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} - fi - --if test x"${install_sh+set}" != xset; then -+if test x"${install_sh}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; -@@ -2937,8 +2929,8 @@ - # <https://lists.gnu.org/archive/html/automake/2012-07/msg00014.html> - mkdir_p='$(MKDIR_P)' - --# We need awk for the "check" target (and possibly the TAP driver). The --# system "awk" is bad on some platforms. -+# We need awk for the "check" target. The system "awk" is bad on -+# some platforms. - # Always define AMTAR for backward compatibility. Yes, it's still used - # in the wild :-( We should find a proper way to deprecate it ... 
- AMTAR='$${TAR-tar}' -@@ -3666,65 +3658,6 @@ - ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' - ac_compiler_gnu=$ac_cv_c_compiler_gnu - --ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 --$as_echo_n "checking whether $CC understands -c and -o together... " >&6; } --if ${am_cv_prog_cc_c_o+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext --/* end confdefs.h. */ -- --int --main () --{ -- -- ; -- return 0; --} --_ACEOF -- # Make sure it works both with $CC and with simple cc. -- # Following AC_PROG_CC_C_O, we do the test twice because some -- # compilers refuse to overwrite an existing .o file with -o, -- # though they will create one. -- am_cv_prog_cc_c_o=yes -- for am_i in 1 2; do -- if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 -- ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 -- ac_status=$? -- echo "$as_me:$LINENO: \$? = $ac_status" >&5 -- (exit $ac_status); } \ -- && test -f conftest2.$ac_objext; then -- : OK -- else -- am_cv_prog_cc_c_o=no -- break -- fi -- done -- rm -f core conftest* -- unset am_i --fi --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 --$as_echo "$am_cv_prog_cc_c_o" >&6; } --if test "$am_cv_prog_cc_c_o" != yes; then -- # Losing compiler, so override with the script. -- # FIXME: It is wrong to rewrite CC. -- # But if we don't then we get into trouble of one sort or another. -- # A longer-term fix would be to have automake use am__CC in this case, -- # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" -- CC="$am_aux_dir/compile $CC" --fi --ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu -- -- - depcc="$CC" am_compiler_list= - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 -@@ -3859,6 +3792,131 @@ - eval pac_save_CFLAGS_${pac_save_CFLAGS_nesting}="" - - -+if test "x$CC" != xcc; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5 -+$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5 -+$as_echo_n "checking whether cc understands -c and -o together... " >&6; } -+fi -+set dummy $CC; ac_cc=`$as_echo "$2" | -+ sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -+if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ -+ -+ ; -+ return 0; -+} -+_ACEOF -+# Make sure it works both with $CC and with simple cc. -+# We do the test twice because some compilers refuse to overwrite an -+# existing .o file with -o, though they will create one. 
-+ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -+rm -f conftest2.* -+if { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } && -+ test -f conftest2.$ac_objext && { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; }; -+then -+ eval ac_cv_prog_cc_${ac_cc}_c_o=yes -+ if test "x$CC" != xcc; then -+ # Test first that cc exists at all. -+ if { ac_try='cc -c conftest.$ac_ext >&5' -+ { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; }; }; then -+ ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -+ rm -f conftest2.* -+ if { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } && -+ test -f conftest2.$ac_objext && { { case "(($ac_try" in -+ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; -+ *) ac_try_echo=$ac_try;; -+esac -+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -+$as_echo "$ac_try_echo"; } >&5 -+ (eval "$ac_try") 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; }; -+ then -+ # cc works too. -+ : -+ else -+ # cc exists but doesn't like -o. -+ eval ac_cv_prog_cc_${ac_cc}_c_o=no -+ fi -+ fi -+ fi -+else -+ eval ac_cv_prog_cc_${ac_cc}_c_o=no -+fi -+rm -f core conftest* -+ -+fi -+if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -+$as_echo "yes" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+ -+$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h -+ -+fi -+ -+# FIXME: we rely on the cache variable name because -+# there is no other way. -+set dummy $CC -+am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -+eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -+if test "$am_t" != yes; then -+ # Losing compiler, so override with the script. -+ # FIXME: It is wrong to rewrite CC. -+ # But if we don't then we get into trouble of one sort or another. 
-+ # A longer-term fix would be to have automake use am__CC in this case, -+ # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" -+ CC="$am_aux_dir/compile $CC" -+fi -+ - - - -@@ -4428,13 +4486,7 @@ - if ${am_cv_ar_interface+:} false; then : - $as_echo_n "(cached) " >&6 - else -- ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu -- -- am_cv_ar_interface=ar -+ am_cv_ar_interface=ar - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - int some_variable = 0; -@@ -4465,11 +4517,6 @@ - - fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -- ac_ext=c --ac_cpp='$CPP $CPPFLAGS' --ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' --ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' --ac_compiler_gnu=$ac_cv_c_compiler_gnu - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_ar_interface" >&5 -@@ -4511,8 +4558,8 @@ - - - --macro_version='2.4.6' --macro_revision='2.4.6' -+macro_version='2.4.2' -+macro_revision='1.3337' - - - -@@ -4526,7 +4573,7 @@ - - - --ltmain=$ac_aux_dir/ltmain.sh -+ltmain="$ac_aux_dir/ltmain.sh" - - # Make sure we can run config.sub. - $SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || -@@ -4646,7 +4693,7 @@ - $ECHO "" - } - --case $ECHO in -+case "$ECHO" in - printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 - $as_echo "printf" >&6; } ;; - print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 -@@ -4839,19 +4886,19 @@ - - # Check whether --with-gnu-ld was given. - if test "${with_gnu_ld+set}" = set; then : -- withval=$with_gnu_ld; test no = "$withval" || with_gnu_ld=yes -+ withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes - else - with_gnu_ld=no - fi - - ac_prog=ld --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - # Check if gcc -print-prog-name=ld gives a path. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 - $as_echo_n "checking for ld used by $CC... " >&6; } - case $host in - *-*-mingw*) -- # gcc leaves a trailing carriage return, which upsets mingw -+ # gcc leaves a trailing carriage return which upsets mingw - ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; - *) - ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; -@@ -4865,7 +4912,7 @@ - while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do - ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` - done -- test -z "$LD" && LD=$ac_prog -+ test -z "$LD" && LD="$ac_prog" - ;; - "") - # If it fails, then pretend we aren't using GCC. -@@ -4876,7 +4923,7 @@ - with_gnu_ld=unknown - ;; - esac --elif test yes = "$with_gnu_ld"; then -+elif test "$with_gnu_ld" = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 - $as_echo_n "checking for GNU ld... " >&6; } - else -@@ -4887,32 +4934,32 @@ - $as_echo_n "(cached) " >&6 - else - if test -z "$LD"; then -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then -- lt_cv_path_LD=$ac_dir/$ac_prog -+ lt_cv_path_LD="$ac_dir/$ac_prog" - # Check to see if the program is GNU ld. I'd rather use --version, - # but apparently some variants of GNU ld only accept -v. 
- # Break only if it was the GNU/non-GNU ld that we prefer. - case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in - *GNU* | *'with BFD'*) -- test no != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != no && break - ;; - *) -- test yes != "$with_gnu_ld" && break -+ test "$with_gnu_ld" != yes && break - ;; - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - else -- lt_cv_path_LD=$LD # Let the user override the test with a path. -+ lt_cv_path_LD="$LD" # Let the user override the test with a path. - fi - fi - --LD=$lt_cv_path_LD -+LD="$lt_cv_path_LD" - if test -n "$LD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 - $as_echo "$LD" >&6; } -@@ -4955,38 +5002,33 @@ - else - if test -n "$NM"; then - # Let the user override the test. -- lt_cv_path_NM=$NM -+ lt_cv_path_NM="$NM" - else -- lt_nm_to_check=${ac_tool_prefix}nm -+ lt_nm_to_check="${ac_tool_prefix}nm" - if test -n "$ac_tool_prefix" && test "$build" = "$host"; then - lt_nm_to_check="$lt_nm_to_check nm" - fi - for lt_tmp_nm in $lt_nm_to_check; do -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- tmp_nm=$ac_dir/$lt_tmp_nm -- if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then -+ tmp_nm="$ac_dir/$lt_tmp_nm" -+ if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then - # Check to see if the nm accepts a BSD-compat flag. -- # Adding the 'sed 1q' prevents false positives on HP-UX, which says: -+ # Adding the `sed 1q' prevents false positives on HP-UX, which says: - # nm: unknown option "B" ignored - # Tru64's nm complains that /dev/null is an invalid object file -- # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty -- case $build_os in -- mingw*) lt_bad_file=conftest.nm/nofile ;; -- *) lt_bad_file=/dev/null ;; -- esac -- case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in -- *$lt_bad_file* | *'Invalid file or object type'*) -+ case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in -+ */dev/null* | *'Invalid file or object type'*) - lt_cv_path_NM="$tmp_nm -B" -- break 2 -+ break - ;; - *) - case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in - */dev/null*) - lt_cv_path_NM="$tmp_nm -p" -- break 2 -+ break - ;; - *) - lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but -@@ -4997,15 +5039,15 @@ - esac - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - done - : ${lt_cv_path_NM=no} - fi - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 - $as_echo "$lt_cv_path_NM" >&6; } --if test no != "$lt_cv_path_NM"; then -- NM=$lt_cv_path_NM -+if test "$lt_cv_path_NM" != "no"; then -+ NM="$lt_cv_path_NM" - else - # Didn't find any BSD compatible name lister, look for dumpbin. 
- if test -n "$DUMPBIN"; then : -@@ -5111,9 +5153,9 @@ - fi - fi - -- case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in -+ case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in - *COFF*) -- DUMPBIN="$DUMPBIN -symbols -headers" -+ DUMPBIN="$DUMPBIN -symbols" - ;; - *) - DUMPBIN=: -@@ -5121,8 +5163,8 @@ - esac - fi - -- if test : != "$DUMPBIN"; then -- NM=$DUMPBIN -+ if test "$DUMPBIN" != ":"; then -+ NM="$DUMPBIN" - fi - fi - test -z "$NM" && NM=nm -@@ -5173,7 +5215,7 @@ - $as_echo_n "(cached) " >&6 - else - i=0 -- teststring=ABCD -+ teststring="ABCD" - - case $build_os in - msdosdjgpp*) -@@ -5213,7 +5255,7 @@ - lt_cv_sys_max_cmd_len=8192; - ;; - -- bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) -+ netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) - # This has been around since 386BSD, at least. Likely further. - if test -x /sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` -@@ -5263,23 +5305,22 @@ - ;; - *) - lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` -- if test -n "$lt_cv_sys_max_cmd_len" && \ -- test undefined != "$lt_cv_sys_max_cmd_len"; then -+ if test -n "$lt_cv_sys_max_cmd_len"; then - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - else - # Make teststring a little bigger before we do anything with it. - # a 1K string should be a reasonable start. -- for i in 1 2 3 4 5 6 7 8; do -+ for i in 1 2 3 4 5 6 7 8 ; do - teststring=$teststring$teststring - done - SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} - # If test is not a shell built-in, we'll probably end up computing a - # maximum length that is only half of the actual maximum length, but - # we can't tell. -- while { test X`env echo "$teststring$teststring" 2>/dev/null` \ -+ while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ - = "X$teststring$teststring"; } >/dev/null 2>&1 && -- test 17 != "$i" # 1/2 MB should be enough -+ test $i != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - teststring=$teststring$teststring -@@ -5297,7 +5338,7 @@ - - fi - --if test -n "$lt_cv_sys_max_cmd_len"; then -+if test -n $lt_cv_sys_max_cmd_len ; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 - $as_echo "$lt_cv_sys_max_cmd_len" >&6; } - else -@@ -5315,6 +5356,30 @@ - : ${MV="mv -f"} - : ${RM="rm -f"} - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 -+$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } -+# Try some XSI features -+xsi_shell=no -+( _lt_dummy="a/b/c" -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ -+ && eval 'test $(( 1 + 1 )) -eq 2 \ -+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ -+ && xsi_shell=yes -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 -+$as_echo "$xsi_shell" >&6; } -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 -+$as_echo_n "checking whether the shell understands \"+=\"... 
" >&6; } -+lt_shell_append=no -+( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ -+ >/dev/null 2>&1 \ -+ && lt_shell_append=yes -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 -+$as_echo "$lt_shell_append" >&6; } -+ -+ - if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - lt_unset=unset - else -@@ -5437,13 +5502,13 @@ - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - reload_cmds=false - fi - ;; - darwin*) -- if test yes = "$GCC"; then -- reload_cmds='$LTCC $LTCFLAGS -nostdlib $wl-r -o $output$reload_objs' -+ if test "$GCC" = yes; then -+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' - else - reload_cmds='$LD$reload_flag -o $output$reload_objs' - fi -@@ -5571,13 +5636,13 @@ - # Need to set the preceding variable on all platforms that support - # interlibrary dependencies. - # 'none' -- dependencies not supported. --# 'unknown' -- same as none, but documents that we really don't know. -+# `unknown' -- same as none, but documents that we really don't know. - # 'pass_all' -- all dependencies passed with no checks. - # 'test_compile' -- check by making test program. - # 'file_magic [[regex]]' -- check by looking for files in library path --# that responds to the $file_magic_cmd with a given extended regex. --# If you have 'file' or equivalent on your system and you're not sure --# whether 'pass_all' will *always* work, you probably want this one. -+# which responds to the $file_magic_cmd with a given extended regex. -+# If you have `file' or equivalent on your system and you're not sure -+# whether `pass_all' will *always* work, you probably want this one. - - case $host_os in - aix[4-9]*) -@@ -5604,7 +5669,8 @@ - # Base MSYS/MinGW do not provide the 'file' command needed by - # func_win32_libid shell function, so use a weaker test based on 'objdump', - # unless we find 'file', for example because we are cross-compiling. -- if ( file / ) >/dev/null 2>&1; then -+ # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. -+ if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -@@ -5640,6 +5706,10 @@ - fi - ;; - -+gnu*) -+ lt_cv_deplibs_check_method=pass_all -+ ;; -+ - haiku*) - lt_cv_deplibs_check_method=pass_all - ;; -@@ -5678,7 +5748,7 @@ - ;; - - # This must be glibc/ELF. 
--linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -@@ -5700,8 +5770,8 @@ - lt_cv_deplibs_check_method=pass_all - ;; - --openbsd* | bitrig*) -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+openbsd*) -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' -@@ -5754,9 +5824,6 @@ - tpf*) - lt_cv_deplibs_check_method=pass_all - ;; --os2*) -- lt_cv_deplibs_check_method=pass_all -- ;; - esac - - fi -@@ -5914,8 +5981,8 @@ - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) -- # two different shell functions defined in ltmain.sh; -- # decide which one to use based on capabilities of $DLLTOOL -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL - case `$DLLTOOL --help 2>&1` in - *--identify-strict*) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -@@ -5927,7 +5994,7 @@ - ;; - *) - # fallback: assume linklib IS sharedlib -- lt_cv_sharedlib_from_linklib_cmd=$ECHO -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" - ;; - esac - -@@ -6081,7 +6148,7 @@ - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -- if test 0 -eq "$ac_status"; then -+ if test "$ac_status" -eq 0; then - # Ensure the archiver fails upon bogus file names. - rm -f conftest.$ac_objext libconftest.a - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -@@ -6089,7 +6156,7 @@ - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -- if test 0 -ne "$ac_status"; then -+ if test "$ac_status" -ne 0; then - lt_cv_ar_at_file=@ - fi - fi -@@ -6102,7 +6169,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 - $as_echo "$lt_cv_ar_at_file" >&6; } - --if test no = "$lt_cv_ar_at_file"; then -+if test "x$lt_cv_ar_at_file" = xno; then - archiver_list_spec= - else - archiver_list_spec=$lt_cv_ar_at_file -@@ -6319,7 +6386,7 @@ - - if test -n "$RANLIB"; then - case $host_os in -- bitrig* | openbsd*) -+ openbsd*) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" - ;; - *) -@@ -6409,7 +6476,7 @@ - symcode='[ABCDGISTW]' - ;; - hpux*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - symcode='[ABCDEGRST]' - fi - ;; -@@ -6442,44 +6509,14 @@ - symcode='[ABCDGIRSTW]' ;; - esac - --if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Gets list of data symbols to import. -- lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" -- # Adjust the below global symbol transforms to fixup imported variables. -- lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" -- lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" -- lt_c_name_lib_hook="\ -- -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -- -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" --else -- # Disable hooks by default. -- lt_cv_sys_global_symbol_to_import= -- lt_cdecl_hook= -- lt_c_name_hook= -- lt_c_name_lib_hook= --fi -- - # Transform an extracted symbol line into a proper C declaration. - # Some systems (esp. on ia64) link data and code symbols differently, - # so use this general approach. 
--lt_cv_sys_global_symbol_to_cdecl="sed -n"\ --$lt_cdecl_hook\ --" -e 's/^T .* \(.*\)$/extern int \1();/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" -+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ --$lt_c_name_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" -- --# Transform an extracted symbol line into symbol name with lib prefix and --# symbol address. --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ --$lt_c_name_lib_hook\ --" -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ --" -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ --" -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -6497,24 +6534,21 @@ - - # Write the raw and C identifiers. - if test "$lt_cv_nm_interface" = "MS dumpbin"; then -- # Fake it for dumpbin and say T for any non-static function, -- # D for any global variable and I for any imported variable. -+ # Fake it for dumpbin and say T for any non-static function -+ # and D for any global variable. - # Also find C++ and __fastcall symbols from MSVC++, - # which start with @ or ?. - lt_cv_sys_global_symbol_pipe="$AWK '"\ - " {last_section=section; section=\$ 3};"\ - " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ - " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ --" /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ --" /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ --" /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ - " \$ 0!~/External *\|/{next};"\ - " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ - " {if(hide[section]) next};"\ --" {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ --" {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ --" s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ --" s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ -+" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ -+" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ -+" s[1]~/^[@?]/{print s[1], s[1]; next};"\ -+" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ - " ' prfx=^$ac_symprfx" - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" -@@ -6562,11 +6596,11 @@ - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext - /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ --#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE --/* DATA imports from DLLs on WIN32 can't be const, because runtime -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. 
*/ - # define LT_DLSYM_CONST --#elif defined __osf__ -+#elif defined(__osf__) - /* This system does not cope well with relocations in const data. */ - # define LT_DLSYM_CONST - #else -@@ -6592,7 +6626,7 @@ - { - { "@PROGRAM@", (void *) 0 }, - _LT_EOF -- $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext -+ $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext - cat <<\_LT_EOF >> conftest.$ac_ext - {0, (void *) 0} - }; -@@ -6612,13 +6646,13 @@ - mv conftest.$ac_objext conftstm.$ac_objext - lt_globsym_save_LIBS=$LIBS - lt_globsym_save_CFLAGS=$CFLAGS -- LIBS=conftstm.$ac_objext -+ LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -- test $ac_status = 0; } && test -s conftest$ac_exeext; then -+ test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi - LIBS=$lt_globsym_save_LIBS -@@ -6639,7 +6673,7 @@ - rm -rf conftest* conftst* - - # Do not use the global_symbol_pipe unless it works. -- if test yes = "$pipe_works"; then -+ if test "$pipe_works" = yes; then - break - else - lt_cv_sys_global_symbol_pipe= -@@ -6692,16 +6726,6 @@ - - - -- -- -- -- -- -- -- -- -- -- - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 - $as_echo_n "checking for sysroot... " >&6; } - -@@ -6714,9 +6738,9 @@ - - - lt_sysroot= --case $with_sysroot in #( -+case ${with_sysroot} in #( - yes) -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_sysroot=`$CC --print-sysroot 2>/dev/null` - fi - ;; #( -@@ -6726,8 +6750,8 @@ - no|'') - ;; #( - *) -- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_sysroot" >&5 --$as_echo "$with_sysroot" >&6; } -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5 -+$as_echo "${with_sysroot}" >&6; } - as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5 - ;; - esac -@@ -6739,99 +6763,18 @@ - - - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a working dd" >&5 --$as_echo_n "checking for a working dd... " >&6; } --if ${ac_cv_path_lt_DD+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --: ${lt_DD:=$DD} --if test -z "$lt_DD"; then -- ac_path_lt_DD_found=false -- # Loop through the user's path and test for each of PROGNAME-LIST -- as_save_IFS=$IFS; IFS=$PATH_SEPARATOR --for as_dir in $PATH --do -- IFS=$as_save_IFS -- test -z "$as_dir" && as_dir=. -- for ac_prog in dd; do -- for ac_exec_ext in '' $ac_executable_extensions; do -- ac_path_lt_DD="$as_dir/$ac_prog$ac_exec_ext" -- as_fn_executable_p "$ac_path_lt_DD" || continue --if "$ac_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: --fi -- $ac_path_lt_DD_found && break 3 -- done -- done -- done --IFS=$as_save_IFS -- if test -z "$ac_cv_path_lt_DD"; then -- : -- fi --else -- ac_cv_path_lt_DD=$lt_DD --fi -- --rm -f conftest.i conftest2.i conftest.out --fi --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_lt_DD" >&5 --$as_echo "$ac_cv_path_lt_DD" >&6; } -- -- --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to truncate binary pipes" >&5 --$as_echo_n "checking how to truncate binary pipes... 
" >&6; } --if ${lt_cv_truncate_bin+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- printf 0123456789abcdef0123456789abcdef >conftest.i --cat conftest.i conftest.i >conftest2.i --lt_cv_truncate_bin= --if "$ac_cv_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then -- cmp -s conftest.i conftest.out \ -- && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" --fi --rm -f conftest.i conftest2.i conftest.out --test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q" --fi --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_truncate_bin" >&5 --$as_echo "$lt_cv_truncate_bin" >&6; } -- -- -- -- -- -- -- --# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. --func_cc_basename () --{ -- for cc_temp in $*""; do -- case $cc_temp in -- compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -- distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -- \-*) ;; -- *) break;; -- esac -- done -- func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` --} -- - # Check whether --enable-libtool-lock was given. - if test "${enable_libtool_lock+set}" = set; then : - enableval=$enable_libtool_lock; - fi - --test no = "$enable_libtool_lock" || enable_libtool_lock=yes -+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - - # Some flags need to be propagated to the compiler or linker for good - # libtool support. - case $host in - ia64-*-hpux*) -- # Find out what ABI is being produced by ac_compile, and set mode -- # options accordingly. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 -@@ -6840,25 +6783,24 @@ - test $ac_status = 0; }; then - case `/usr/bin/file conftest.$ac_objext` in - *ELF-32*) -- HPUX_IA64_MODE=32 -+ HPUX_IA64_MODE="32" - ;; - *ELF-64*) -- HPUX_IA64_MODE=64 -+ HPUX_IA64_MODE="64" - ;; - esac - fi - rm -rf conftest* - ;; - *-*-irix6*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -+ # Find out which ABI we are using. - echo '#line '$LINENO' "configure"' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -melf32bsmip" -@@ -6887,50 +6829,9 @@ - rm -rf conftest* - ;; - --mips64*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -- echo '#line '$LINENO' "configure"' > conftest.$ac_ext -- if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 -- (eval $ac_compile) 2>&5 -- ac_status=$? -- $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 -- test $ac_status = 0; }; then -- emul=elf -- case `/usr/bin/file conftest.$ac_objext` in -- *32-bit*) -- emul="${emul}32" -- ;; -- *64-bit*) -- emul="${emul}64" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *MSB*) -- emul="${emul}btsmip" -- ;; -- *LSB*) -- emul="${emul}ltsmip" -- ;; -- esac -- case `/usr/bin/file conftest.$ac_objext` in -- *N32*) -- emul="${emul}n32" -- ;; -- esac -- LD="${LD-ld} -m $emul" -- fi -- rm -rf conftest* -- ;; -- --x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ -+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ - s390*-*linux*|s390*-*tpf*|sparc*-*linux*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. Note that the listed cases only cover the -- # situations where additional linker options are needed (such as when -- # doing 32-bit compilation for a host where ld defaults to 64-bit, or -- # vice versa); the common cases where no linker options are needed do -- # not appear in the list. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 -@@ -6944,19 +6845,9 @@ - LD="${LD-ld} -m elf_i386_fbsd" - ;; - x86_64-*linux*) -- case `/usr/bin/file conftest.o` in -- *x86-64*) -- LD="${LD-ld} -m elf32_x86_64" -- ;; -- *) -- LD="${LD-ld} -m elf_i386" -- ;; -- esac -- ;; -- powerpc64le-*linux*) -- LD="${LD-ld} -m elf32lppclinux" -+ LD="${LD-ld} -m elf_i386" - ;; -- powerpc64-*linux*) -+ ppc64-*linux*|powerpc64-*linux*) - LD="${LD-ld} -m elf32ppclinux" - ;; - s390x-*linux*) -@@ -6975,10 +6866,7 @@ - x86_64-*linux*) - LD="${LD-ld} -m elf_x86_64" - ;; -- powerpcle-*linux*) -- LD="${LD-ld} -m elf64lppc" -- ;; -- powerpc-*linux*) -+ ppc*-*linux*|powerpc*-*linux*) - LD="${LD-ld} -m elf64ppc" - ;; - s390*-*linux*|s390*-*tpf*) -@@ -6996,7 +6884,7 @@ - - *-*-sco3.2v5*) - # On SCO OpenServer 5, we need -belf to get full-featured binaries. -- SAVE_CFLAGS=$CFLAGS -+ SAVE_CFLAGS="$CFLAGS" - CFLAGS="$CFLAGS -belf" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 - $as_echo_n "checking whether the C compiler needs -belf... " >&6; } -@@ -7036,14 +6924,13 @@ - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 - $as_echo "$lt_cv_cc_needs_belf" >&6; } -- if test yes != "$lt_cv_cc_needs_belf"; then -+ if test x"$lt_cv_cc_needs_belf" != x"yes"; then - # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf -- CFLAGS=$SAVE_CFLAGS -+ CFLAGS="$SAVE_CFLAGS" - fi - ;; - *-*solaris*) -- # Find out what ABI is being produced by ac_compile, and set linker -- # options accordingly. -+ # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 -@@ -7055,7 +6942,7 @@ - case $lt_cv_prog_gnu_ld in - yes*) - case $host in -- i?86-*-solaris*|x86_64-*-solaris*) -+ i?86-*-solaris*) - LD="${LD-ld} -m elf_x86_64" - ;; - sparc*-*-solaris*) -@@ -7064,7 +6951,7 @@ - esac - # GNU ld 2.21 introduced _sol2 emulations. Use them if available. - if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then -- LD=${LD-ld}_sol2 -+ LD="${LD-ld}_sol2" - fi - ;; - *) -@@ -7080,7 +6967,7 @@ - ;; - esac - --need_locks=$enable_libtool_lock -+need_locks="$enable_libtool_lock" - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. 
-@@ -7191,7 +7078,7 @@ - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 - $as_echo "$lt_cv_path_mainfest_tool" >&6; } --if test yes != "$lt_cv_path_mainfest_tool"; then -+if test "x$lt_cv_path_mainfest_tool" != xyes; then - MANIFEST_TOOL=: - fi - -@@ -7694,7 +7581,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_apple_cc_single_mod=no -- if test -z "$LT_MULTI_MODULE"; then -+ if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the -@@ -7712,7 +7599,7 @@ - cat conftest.err >&5 - # Otherwise, if the output was created with a 0 exit code from - # the compiler, it worked. -- elif test -f libconftest.dylib && test 0 = "$_lt_result"; then -+ elif test -f libconftest.dylib && test $_lt_result -eq 0; then - lt_cv_apple_cc_single_mod=yes - else - cat conftest.err >&5 -@@ -7751,7 +7638,7 @@ - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 -@@ -7780,7 +7667,7 @@ - _lt_result=$? - if test -s conftest.err && $GREP force_load conftest.err; then - cat conftest.err >&5 -- elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then -+ elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then - lt_cv_ld_force_load=yes - else - cat conftest.err >&5 -@@ -7793,32 +7680,32 @@ - $as_echo "$lt_cv_ld_force_load" >&6; } - case $host_os in - rhapsody* | darwin1.[012]) -- _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) # darwin 5.x on - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? 
- case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[91]*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -- 10.[012][,.]*) -- _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; -+ 10.[012]*) -+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) -- _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; -+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac -- if test yes = "$lt_cv_apple_cc_single_mod"; then -+ if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi -- if test yes = "$lt_cv_ld_exported_symbols_list"; then -- _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' -+ if test "$lt_cv_ld_exported_symbols_list" = "yes"; then -+ _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else -- _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' -+ _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' - fi -- if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then -+ if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then - _lt_dsymutil='~$DSYMUTIL $lib || :' - else - _lt_dsymutil= -@@ -7826,41 +7713,6 @@ - ;; - esac - --# func_munge_path_list VARIABLE PATH --# ----------------------------------- --# VARIABLE is name of variable containing _space_ separated list of --# directories to be munged by the contents of PATH, which is string --# having a format: --# "DIR[:DIR]:" --# string "DIR[ DIR]" will be prepended to VARIABLE --# ":DIR[:DIR]" --# string "DIR[ DIR]" will be appended to VARIABLE --# "DIRP[:DIRP]::[DIRA:]DIRA" --# string "DIRP[ DIRP]" will be prepended to VARIABLE and string --# "DIRA[ DIRA]" will be appended to VARIABLE --# "DIR[:DIR]" --# VARIABLE will be replaced by "DIR[ DIR]" --func_munge_path_list () --{ -- case x$2 in -- x) -- ;; -- *:) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" -- ;; -- x:*) -- eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- *::*) -- eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" -- eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" -- ;; -- *) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- esac --} -- - for ac_header in dlfcn.h - do : - ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default -@@ -7897,14 +7749,14 @@ - *) - enable_shared=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_shared=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -7928,14 +7780,14 @@ - *) - enable_static=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_static=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -7959,14 +7811,14 @@ - *) - pic_mode=default - # Look at the argument we got. We use all the common list separators. 
-- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for lt_pkg in $withval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$lt_pkg" = "X$lt_p"; then - pic_mode=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -7974,6 +7826,8 @@ - fi - - -+test -z "$pic_mode" && pic_mode=default -+ - - - -@@ -7989,14 +7843,14 @@ - *) - enable_fast_install=no - # Look at the argument we got. We use all the common list separators. -- lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, -+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_fast_install=yes - fi - done -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - ;; - esac - else -@@ -8010,63 +7864,11 @@ - - - -- shared_archive_member_spec= --case $host,$enable_shared in --power*-*-aix[5-9]*,yes) -- { $as_echo "$as_me:${as_lineno-$LINENO}: checking which variant of shared library versioning to provide" >&5 --$as_echo_n "checking which variant of shared library versioning to provide... " >&6; } -- --# Check whether --with-aix-soname was given. --if test "${with_aix_soname+set}" = set; then : -- withval=$with_aix_soname; case $withval in -- aix|svr4|both) -- ;; -- *) -- as_fn_error $? "Unknown argument to --with-aix-soname" "$LINENO" 5 -- ;; -- esac -- lt_cv_with_aix_soname=$with_aix_soname --else -- if ${lt_cv_with_aix_soname+:} false; then : -- $as_echo_n "(cached) " >&6 --else -- lt_cv_with_aix_soname=aix --fi -- -- with_aix_soname=$lt_cv_with_aix_soname --fi -- -- { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_aix_soname" >&5 --$as_echo "$with_aix_soname" >&6; } -- if test aix != "$with_aix_soname"; then -- # For the AIX way of multilib, we name the shared archive member -- # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o', -- # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File. -- # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag, -- # the AIX toolchain works better with OBJECT_MODE set (default 32). -- if test 64 = "${OBJECT_MODE-32}"; then -- shared_archive_member_spec=shr_64 -- else -- shared_archive_member_spec=shr -- fi -- fi -- ;; --*) -- with_aix_soname=aix -- ;; --esac -- -- -- -- -- -- -- - - - - # This can be used to rebuild libtool when needed --LIBTOOL_DEPS=$ltmain -+LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. - LIBTOOL='$(SHELL) $(top_builddir)/libtool' -@@ -8115,7 +7917,7 @@ - - - --if test -n "${ZSH_VERSION+set}"; then -+if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -@@ -8154,7 +7956,7 @@ - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. -- if test set != "${COLLECT_NAMES+set}"; then -+ if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -8165,14 +7967,14 @@ - ofile=libtool - can_build_shared=yes - --# All known linkers require a '.a' archive for static linking (except MSVC, -+# All known linkers require a `.a' archive for static linking (except MSVC, - # which needs '.lib'). 
- libext=a - --with_gnu_ld=$lt_cv_prog_gnu_ld -+with_gnu_ld="$lt_cv_prog_gnu_ld" - --old_CC=$CC --old_CFLAGS=$CFLAGS -+old_CC="$CC" -+old_CFLAGS="$CFLAGS" - - # Set sane defaults for various variables - test -z "$CC" && CC=cc -@@ -8181,8 +7983,15 @@ - test -z "$LD" && LD=ld - test -z "$ac_objext" && ac_objext=o - --func_cc_basename $compiler --cc_basename=$func_cc_basename_result -+for cc_temp in $compiler""; do -+ case $cc_temp in -+ compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -+ distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -+ \-*) ;; -+ *) break;; -+ esac -+done -+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - - # Only perform the check for file, if the check method requires it -@@ -8197,22 +8006,22 @@ - else - case $MAGIC_CMD in - [\\/*] | ?:[\\/]*) -- lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. -+ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; - *) -- lt_save_MAGIC_CMD=$MAGIC_CMD -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_MAGIC_CMD="$MAGIC_CMD" -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. -- if test -f "$ac_dir/${ac_tool_prefix}file"; then -- lt_cv_path_MAGIC_CMD=$ac_dir/"${ac_tool_prefix}file" -+ if test -f $ac_dir/${ac_tool_prefix}file; then -+ lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` -- MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : -@@ -8235,13 +8044,13 @@ - break - fi - done -- IFS=$lt_save_ifs -- MAGIC_CMD=$lt_save_MAGIC_CMD -+ IFS="$lt_save_ifs" -+ MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; - esac - fi - --MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 - $as_echo "$MAGIC_CMD" >&6; } -@@ -8263,22 +8072,22 @@ - else - case $MAGIC_CMD in - [\\/*] | ?:[\\/]*) -- lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. -+ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; - *) -- lt_save_MAGIC_CMD=$MAGIC_CMD -- lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR -+ lt_save_MAGIC_CMD="$MAGIC_CMD" -+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do -- IFS=$lt_save_ifs -+ IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. 
-- if test -f "$ac_dir/file"; then -- lt_cv_path_MAGIC_CMD=$ac_dir/"file" -+ if test -f $ac_dir/file; then -+ lt_cv_path_MAGIC_CMD="$ac_dir/file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` -- MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : -@@ -8301,13 +8110,13 @@ - break - fi - done -- IFS=$lt_save_ifs -- MAGIC_CMD=$lt_save_MAGIC_CMD -+ IFS="$lt_save_ifs" -+ MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; - esac - fi - --MAGIC_CMD=$lt_cv_path_MAGIC_CMD -+MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 - $as_echo "$MAGIC_CMD" >&6; } -@@ -8328,7 +8137,7 @@ - - # Use C for the default configuration in the libtool script - --lt_save_CC=$CC -+lt_save_CC="$CC" - ac_ext=c - ac_cpp='$CPP $CPPFLAGS' - ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -@@ -8386,7 +8195,7 @@ - - lt_prog_compiler_no_builtin_flag= - --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $cc_basename in - nvcc*) - lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; -@@ -8402,7 +8211,7 @@ - lt_cv_prog_compiler_rtti_exceptions=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="-fno-rtti -fno-exceptions" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="-fno-rtti -fno-exceptions" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins -@@ -8432,7 +8241,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 - $as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } - --if test yes = "$lt_cv_prog_compiler_rtti_exceptions"; then -+if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then - lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" - else - : -@@ -8450,18 +8259,17 @@ - lt_prog_compiler_static= - - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_static='-static' - - case $host_os in - aix*) - # All AIX code is PIC. -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - fi -- lt_prog_compiler_pic='-fPIC' - ;; - - amigaos*) -@@ -8472,8 +8280,8 @@ - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but -- # adding the '-m68020' flag to GCC prevents building anything better, -- # like '-m68040'. -+ # adding the `-m68020' flag to GCC prevents building anything better, -+ # like `-m68040'. 
- lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' - ;; - esac -@@ -8489,11 +8297,6 @@ - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static='$wl-static' -- ;; -- esac - ;; - - darwin* | rhapsody*) -@@ -8564,7 +8367,7 @@ - case $host_os in - aix*) - lt_prog_compiler_wl='-Wl,' -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - else -@@ -8572,29 +8375,10 @@ - fi - ;; - -- darwin* | rhapsody*) -- # PIC is the default on this platform -- # Common symbols not allowed in MH_DYLIB files -- lt_prog_compiler_pic='-fno-common' -- case $cc_basename in -- nagfor*) -- # NAG Fortran compiler -- lt_prog_compiler_wl='-Wl,-Wl,,' -- lt_prog_compiler_pic='-PIC' -- lt_prog_compiler_static='-Bstatic' -- ;; -- esac -- ;; -- - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic='-DDLL_EXPORT' -- case $host_os in -- os2*) -- lt_prog_compiler_static='$wl-static' -- ;; -- esac - ;; - - hpux9* | hpux10* | hpux11*) -@@ -8610,7 +8394,7 @@ - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? -- lt_prog_compiler_static='$wl-a ${wl}archive' -+ lt_prog_compiler_static='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) -@@ -8619,9 +8403,9 @@ - lt_prog_compiler_static='-non_shared' - ;; - -- linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+ linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in -- # old Intel for x86_64, which still supported -KPIC. -+ # old Intel for x86_64 which still supported -KPIC. - ecc*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' -@@ -8646,12 +8430,6 @@ - lt_prog_compiler_pic='-PIC' - lt_prog_compiler_static='-Bstatic' - ;; -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- lt_prog_compiler_wl='-Wl,' -- lt_prog_compiler_pic='-fPIC' -- lt_prog_compiler_static='-static' -- ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -8749,7 +8527,7 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - lt_prog_compiler_pic='-Kconform_pic' - lt_prog_compiler_static='-Bstatic' - fi -@@ -8778,7 +8556,7 @@ - fi - - case $host_os in -- # For platforms that do not support PIC, -DPIC is meaningless: -+ # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic= - ;; -@@ -8810,7 +8588,7 @@ - lt_cv_prog_compiler_pic_works=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext -- lt_compiler_flag="$lt_prog_compiler_pic -DPIC" ## exclude from sc_useless_quotes_in_assignment -+ lt_compiler_flag="$lt_prog_compiler_pic -DPIC" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. 
- # Note that $ac_compile itself does not contain backslashes and begins -@@ -8840,7 +8618,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 - $as_echo "$lt_cv_prog_compiler_pic_works" >&6; } - --if test yes = "$lt_cv_prog_compiler_pic_works"; then -+if test x"$lt_cv_prog_compiler_pic_works" = xyes; then - case $lt_prog_compiler_pic in - "" | " "*) ;; - *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; -@@ -8872,7 +8650,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_prog_compiler_static_works=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -8891,13 +8669,13 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 - $as_echo "$lt_cv_prog_compiler_static_works" >&6; } - --if test yes = "$lt_cv_prog_compiler_static_works"; then -+if test x"$lt_cv_prog_compiler_static_works" = xyes; then - : - else - lt_prog_compiler_static= -@@ -9017,8 +8795,8 @@ - - - --hard_links=nottested --if test no = "$lt_cv_prog_compiler_c_o" && test no != "$need_locks"; then -+hard_links="nottested" -+if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 - $as_echo_n "checking if we can lock with hard links... " >&6; } -@@ -9030,9 +8808,9 @@ - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 - $as_echo "$hard_links" >&6; } -- if test no = "$hard_links"; then -- { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 --$as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} -+ if test "$hard_links" = no; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -+$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi - else -@@ -9075,9 +8853,9 @@ - # included in the symbol list - include_expsyms= - # exclude_expsyms can be an extended regexp of symbols to exclude -- # it will be wrapped by ' (' and ')$', so one must not match beginning or -- # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', -- # as well as any symbol that contains 'd'. -+ # it will be wrapped by ` (' and `)$', so one must not match beginning or -+ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', -+ # as well as any symbol that contains `d'. - exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if -@@ -9092,7 +8870,7 @@ - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. 
-- if test yes != "$GCC"; then -+ if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; -@@ -9100,7 +8878,7 @@ - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; -- openbsd* | bitrig*) -+ openbsd*) - with_gnu_ld=no - ;; - esac -@@ -9110,7 +8888,7 @@ - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility -@@ -9132,24 +8910,24 @@ - esac - fi - -- if test yes = "$lt_use_gnu_ld_interface"; then -+ if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty -- wlarc='$wl' -+ wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -- export_dynamic_flag_spec='$wl--export-dynamic' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' -+ export_dynamic_flag_spec='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then -- whole_archive_flag_spec=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' -+ whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec= - fi - supports_anon_versioning=no -- case `$LD -v | $SED -e 's/(^)\+)\s\+//' 2>&1` in -+ case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... -@@ -9162,7 +8940,7 @@ - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken -- if test ia64 != "$host_cpu"; then -+ if test "$host_cpu" != ia64; then - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -@@ -9181,7 +8959,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) -@@ -9197,7 +8975,7 @@ - allow_undefined_flag=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME -- archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs=no - fi -@@ -9207,7 +8985,7 @@ - # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, - # as there is no search path for DLLs. 
- hardcode_libdir_flag_spec='-L$libdir' -- export_dynamic_flag_spec='$wl--export-all-symbols' -+ export_dynamic_flag_spec='${wl}--export-all-symbols' - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -@@ -9215,89 +8993,61 @@ - exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file, use it as -- # is; otherwise, prepend EXPORTS... -- archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs=no - fi - ;; - - haiku*) -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs=yes - ;; - -- os2*) -- hardcode_libdir_flag_spec='-L$libdir' -- hardcode_minus_L=yes -- allow_undefined_flag=unsupported -- shrext_cmds=.dll -- archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- 
old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- enable_shared_with_static_runtimes=yes -- ;; -- - interix[3-9]*) - hardcode_direct=no - hardcode_shlibpath_var=no -- hardcode_libdir_flag_spec='$wl-rpath,$libdir' -- export_dynamic_flag_spec='$wl-E' -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -- archive_expsym_cmds='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' -+ archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no -- if test linux-dietlibc = "$host_os"; then -+ if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ -- && test no = "$tmp_diet" -+ && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler -- whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers -- whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; -@@ -9308,47 +9058,42 @@ - lf95*) # Lahey Fortran 8.1 - whole_archive_flag_spec= - tmp_sharedflag='--shared' ;; -- nagfor*) # NAGFOR 5.3 -- tmp_sharedflag='-Wl,-shared' ;; - xl[cC]* | 
bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 -- whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 -- whole_archive_flag_spec='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac -- archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in -- tcc*) -- export_dynamic_flag_spec='-rdynamic' -- ;; - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' -- if test yes = "$supports_anon_versioning"; then -+ if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ -- cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -- echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ -+ echo "local: *; };" >> $output_objdir/$libname.ver~ -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -9362,8 +9107,8 @@ - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $pic_flag 
$libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -9381,8 +9126,8 @@ - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9394,7 +9139,7 @@ - ld_shlibs=no - cat <<_LT_EOF 1>&2 - --*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot -+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not - *** reliably create shared libraries on SCO systems. Therefore, libtool - *** is disabling shared libraries support. We urge you to upgrade GNU - *** binutils to release 2.16.91.0.3 or newer. Another option is to modify -@@ -9409,9 +9154,9 @@ - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9428,15 +9173,15 @@ - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - esac - -- if test no = "$ld_shlibs"; then -+ if test "$ld_shlibs" = no; then - runpath_var= - hardcode_libdir_flag_spec= - export_dynamic_flag_spec= -@@ -9452,7 +9197,7 @@ - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. 
- hardcode_minus_L=yes -- if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then -+ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct=unsupported -@@ -9460,57 +9205,34 @@ - ;; - - aix[4-9]*) -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' -- no_entry_flag= -+ no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. -- # -C means demangle to GNU nm, but means don't demangle to AIX nm. -- # Without the "-l" option, or with the "-B" option, AIX nm treats -- # weak defined symbols like other global defined symbols, whereas -- # GNU nm marks them as "W". -- # While the 'weak' keyword is ignored in the Export File, we need -- # it in the Import File for the 'aix-soname' feature, so we have -- # to replace the "-B" option with "-P" for AIX nm. -+ # -C means demangle to AIX nm, but means don't demangle with GNU nm -+ # Also, AIX nm treats weak defined symbols like other global -+ # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then -- export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else -- export_symbols_cmds='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' -+ export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we -- # have runtime linking enabled, and use it for executables. -- # For shared libraries, we enable/disable runtime linking -- # depending on the kind of the shared library created - -- # when "with_aix_soname,aix_use_runtimelinking" is: -- # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables -- # "aix,yes" lib.so shared, rtl:yes, for executables -- # lib.a static archive -- # "both,no" lib.so.V(shr.o) shared, rtl:yes -- # lib.a(lib.so.V) shared, rtl:no, for executables -- # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a(lib.so.V) shared, rtl:no -- # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables -- # lib.a static archive -+ # need to do runtime linking. 
- case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do -- if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then -+ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done -- if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then -- # With aix-soname=svr4, we create the lib.so.V shared archives only, -- # so we don't have lib.a shared libs to link our executables. -- # We have to force runtime linking in this case. -- aix_use_runtimelinking=yes -- LDFLAGS="$LDFLAGS -Wl,-brtl" -- fi - ;; - esac - -@@ -9529,21 +9251,13 @@ - hardcode_direct_absolute=yes - hardcode_libdir_separator=':' - link_all_deplibs=yes -- file_list_spec='$wl-f,' -- case $with_aix_soname,$aix_use_runtimelinking in -- aix,*) ;; # traditional, no import file -- svr4,* | *,yes) # use import file -- # The Import File defines what to hardcode. -- hardcode_direct=no -- hardcode_direct_absolute=no -- ;; -- esac -+ file_list_spec='${wl}-f,' - -- if test yes = "$GCC"; then -+ if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ -- collect2name=`$CC -print-prog-name=collect2` -+ collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then -@@ -9562,42 +9276,35 @@ - ;; - esac - shared_flag='-shared' -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag="$shared_flag "'$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag="$shared_flag "'${wl}-G' - fi -- # Need to ensure runtime linking is disabled for the traditional -- # shared library, or the linker may eventually find shared libraries -- # /with/ Import File - we do not want to mix them. -- shared_flag_aix='-shared' -- shared_flag_svr4='-shared $wl-G' - else - # not using gcc -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else -- if test yes = "$aix_use_runtimelinking"; then -- shared_flag='$wl-G' -+ if test "$aix_use_runtimelinking" = yes; then -+ shared_flag='${wl}-G' - else -- shared_flag='$wl-bM:SRE' -+ shared_flag='${wl}-bM:SRE' - fi -- shared_flag_aix='$wl-bM:SRE' -- shared_flag_svr4='$wl-G' - fi - fi - -- export_dynamic_flag_spec='$wl-bexpall' -+ export_dynamic_flag_spec='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols=yes -- if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then -+ if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
-- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath_+:} false; then : -@@ -9632,7 +9339,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then -- lt_cv_aix_libpath_=/usr/lib:/lib -+ lt_cv_aix_libpath_="/usr/lib:/lib" - fi - - fi -@@ -9640,17 +9347,17 @@ - aix_libpath=$lt_cv_aix_libpath_ - fi - -- hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" -- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag -+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" -+ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -- if test ia64 = "$host_cpu"; then -- hardcode_libdir_flag_spec='$wl-R $libdir:/usr/lib:/lib' -+ if test "$host_cpu" = ia64; then -+ hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag="-z nodefs" -- archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" -+ archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. -- if test set = "${lt_cv_aix_libpath+set}"; then -+ if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath - else - if ${lt_cv_aix_libpath_+:} false; then : -@@ -9685,7 +9392,7 @@ - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then -- lt_cv_aix_libpath_=/usr/lib:/lib -+ lt_cv_aix_libpath_="/usr/lib:/lib" - fi - - fi -@@ -9693,33 +9400,21 @@ - aix_libpath=$lt_cv_aix_libpath_ - fi - -- hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" -+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -- no_undefined_flag=' $wl-bernotok' -- allow_undefined_flag=' $wl-berok' -- if test yes = "$with_gnu_ld"; then -+ no_undefined_flag=' ${wl}-bernotok' -+ allow_undefined_flag=' ${wl}-berok' -+ if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. 
-- whole_archive_flag_spec='$wl--whole-archive$convenience $wl--no-whole-archive' -+ whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec='$convenience' - fi - archive_cmds_need_lc=yes -- archive_expsym_cmds='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' -- # -brtl affects multiple linker settings, -berok does not and is overridden later -- compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' -- if test svr4 != "$with_aix_soname"; then -- # This is similar to how AIX traditionally builds its shared libraries. -- archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' -- fi -- if test aix != "$with_aix_soname"; then -- archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' -- else -- # used by -dlpreopen to get the symbols -- archive_expsym_cmds="$archive_expsym_cmds"'~$MV $output_objdir/$realname.d/$soname $output_objdir' -- fi -- archive_expsym_cmds="$archive_expsym_cmds"'~$RM -r $output_objdir/$realname.d' -+ # This is similar to how AIX traditionally builds its shared libraries. -+ archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; -@@ -9728,7 +9423,7 @@ - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) -@@ -9758,17 +9453,16 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
-- archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' -- archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then -- cp "$export_symbols" "$output_objdir/$soname.def"; -- echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; -- else -- $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; -- fi~ -- $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -- linknames=' -+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, )='true' - enable_shared_with_static_runtimes=yes -@@ -9777,18 +9471,18 @@ - # Don't use ranlib - old_postinstall_cmds='chmod 644 $oldlib' - postlink_cmds='lt_outputfile="@OUTPUT@"~ -- lt_tool_outputfile="@TOOL_OUTPUT@"~ -- case $lt_outputfile in -- *.exe|*.EXE) ;; -- *) -- lt_outputfile=$lt_outputfile.exe -- lt_tool_outputfile=$lt_tool_outputfile.exe -- ;; -- esac~ -- if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then -- $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -- $RM "$lt_outputfile.manifest"; -- fi' -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' - ;; - *) - # Assume MSVC wrapper -@@ -9797,7 +9491,7 @@ - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=.dll -+ shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. 
-@@ -9816,24 +9510,24 @@ - hardcode_direct=no - hardcode_automatic=yes - hardcode_shlibpath_var=unsupported -- if test yes = "$lt_cv_ld_force_load"; then -- whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' -+ if test "$lt_cv_ld_force_load" = "yes"; then -+ whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - - else - whole_archive_flag_spec='' - fi - link_all_deplibs=yes -- allow_undefined_flag=$_lt_dar_allow_undefined -+ allow_undefined_flag="$_lt_dar_allow_undefined" - case $cc_basename in -- ifort*|nagfor*) _lt_dar_can_shared=yes ;; -+ ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac -- if test yes = "$_lt_dar_can_shared"; then -+ if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all -- archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" -- module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" -- archive_expsym_cmds="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" -- module_expsym_cmds="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" -+ archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" -+ module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" -+ archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" -+ module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs=no -@@ -9875,33 +9569,33 @@ - ;; - - hpux9*) -- if test yes = "$GCC"; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ if test "$GCC" = yes; then -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else -- archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test 
$output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -- hardcode_libdir_flag_spec='$wl+b $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L=yes -- export_dynamic_flag_spec='$wl-E' -+ export_dynamic_flag_spec='${wl}-E' - ;; - - hpux10*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -- archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - hardcode_direct_absolute=yes -- export_dynamic_flag_spec='$wl-E' -+ export_dynamic_flag_spec='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L=yes -@@ -9909,25 +9603,25 @@ - ;; - - hpux11*) -- if test yes,no = "$GCC,$with_gnu_ld"; then -+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) -- archive_cmds='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) -- archive_cmds='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - -@@ -9939,7 +9633,7 @@ - $as_echo_n "(cached) " >&6 - else - lt_cv_prog_compiler__b=no -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -b" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then -@@ -9958,14 +9652,14 @@ - fi - fi - $RM -r conftest* -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 - $as_echo "$lt_cv_prog_compiler__b" >&6; } - --if test yes = "$lt_cv_prog_compiler__b"; then -- archive_cmds='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+if test x"$lt_cv_prog_compiler__b" = xyes; then -+ archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs 
$compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -9973,8 +9667,8 @@ - ;; - esac - fi -- if test no = "$with_gnu_ld"; then -- hardcode_libdir_flag_spec='$wl+b $wl$libdir' -+ if test "$with_gnu_ld" = no; then -+ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - - case $host_cpu in -@@ -9985,7 +9679,7 @@ - *) - hardcode_direct=yes - hardcode_direct_absolute=yes -- export_dynamic_flag_spec='$wl-E' -+ export_dynamic_flag_spec='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. -@@ -9996,8 +9690,8 @@ - ;; - - irix5* | irix6* | nonstopux*) -- if test yes = "$GCC"; then -- archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -@@ -10007,8 +9701,8 @@ - if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 - else -- save_LDFLAGS=$LDFLAGS -- LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. 
*/ - int foo (void) { return 0; } -@@ -10020,34 +9714,24 @@ - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS=$save_LDFLAGS -+ LDFLAGS="$save_LDFLAGS" - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 - $as_echo "$lt_cv_irix_exported_symbol" >&6; } -- if test yes = "$lt_cv_irix_exported_symbol"; then -- archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' -+ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' -+ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc='no' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - inherit_rpath=yes - link_all_deplibs=yes - ;; - -- linux*) -- case $cc_basename in -- tcc*) -- # Fabrice Bellard et al's Tiny C Compiler -- ld_shlibs=yes -- archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- ;; -- esac -- ;; -- - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out -@@ -10062,7 +9746,7 @@ - newsos6) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_shlibpath_var=no - ;; -@@ -10070,19 +9754,27 @@ - *nto* | *qnx*) - ;; - -- openbsd* | bitrig*) -+ openbsd*) - if test -f /usr/libexec/ld.so; then - hardcode_direct=yes - hardcode_shlibpath_var=no - hardcode_direct_absolute=yes -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' -- hardcode_libdir_flag_spec='$wl-rpath,$libdir' -- export_dynamic_flag_spec='$wl-E' -+ archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags 
${wl}-retain-symbols-file,$export_symbols' -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ export_dynamic_flag_spec='${wl}-E' - else -- archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -- hardcode_libdir_flag_spec='$wl-rpath,$libdir' -+ case $host_os in -+ openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) -+ archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' -+ hardcode_libdir_flag_spec='-R$libdir' -+ ;; -+ *) -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ ;; -+ esac - fi - else - ld_shlibs=no -@@ -10093,53 +9785,33 @@ - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - allow_undefined_flag=unsupported -- shrext_cmds=.dll -- archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ -- $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ -- $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ -- $ECHO EXPORTS >> $output_objdir/$libname.def~ -- prefix_cmds="$SED"~ -- if test EXPORTS = "`$SED 1q $export_symbols`"; then -- prefix_cmds="$prefix_cmds -e 1d"; -- fi~ -- prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ -- cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ -- $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ -- emximp -o $lib $output_objdir/$libname.def' -- old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' -- enable_shared_with_static_runtimes=yes -+ archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' -+ old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) -- if test yes = "$GCC"; then -- allow_undefined_flag=' $wl-expect_unresolved $wl\*' -- archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -+ if test "$GCC" = yes; then -+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag=' -expect_unresolved \*' -- archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && 
func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc='no' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag -- if test yes = "$GCC"; then -- allow_undefined_flag=' $wl-expect_unresolved $wl\*' -- archive_cmds='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' -- hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' -+ if test "$GCC" = yes; then -+ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -- archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ -- $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' -+ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec='-rpath $libdir' -@@ -10150,24 +9822,24 @@ - - solaris*) - no_undefined_flag=' -z defs' -- if test yes = "$GCC"; then -- wlarc='$wl' -- archive_cmds='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ wlarc='${wl}' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' -- archive_cmds='$LD -G$allow_undefined_flag -h $soname -o $lib 
$libobjs $deplibs $linker_flags' -+ archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' -+ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) -- wlarc='$wl' -- archive_cmds='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ wlarc='${wl}' -+ archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi -@@ -10177,11 +9849,11 @@ - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, -- # but understands '-z linker_flag'. GCC discards it without '$wl', -+ # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) -- if test yes = "$GCC"; then -- whole_archive_flag_spec='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' -+ if test "$GCC" = yes; then -+ whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec='-z allextract$convenience -z defaultextract' - fi -@@ -10191,10 +9863,10 @@ - ;; - - sunos4*) -- if test sequent = "$host_vendor"; then -+ if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. 
-- archive_cmds='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -10243,43 +9915,43 @@ - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) -- no_undefined_flag='$wl-z,text' -+ no_undefined_flag='${wl}-z,text' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) -- # Note: We CANNOT use -z defs as we might desire, because we do not -+ # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
-- no_undefined_flag='$wl-z,text' -- allow_undefined_flag='$wl-z,nodefs' -+ no_undefined_flag='${wl}-z,text' -+ allow_undefined_flag='${wl}-z,nodefs' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no -- hardcode_libdir_flag_spec='$wl-R,$libdir' -+ hardcode_libdir_flag_spec='${wl}-R,$libdir' - hardcode_libdir_separator=':' - link_all_deplibs=yes -- export_dynamic_flag_spec='$wl-Bexport' -+ export_dynamic_flag_spec='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - -- if test yes = "$GCC"; then -- archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ if test "$GCC" = yes; then -+ archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else -- archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -- archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - -@@ -10294,10 +9966,10 @@ - ;; - esac - -- if test sni = "$host_vendor"; then -+ if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) -- export_dynamic_flag_spec='$wl-Blargedynsym' -+ export_dynamic_flag_spec='${wl}-Blargedynsym' - ;; - esac - fi -@@ -10305,7 +9977,7 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 - $as_echo "$ld_shlibs" >&6; } --test no = "$ld_shlibs" && can_build_shared=no -+test "$ld_shlibs" = no && can_build_shared=no - - with_gnu_ld=$with_gnu_ld - -@@ -10331,7 +10003,7 @@ - # Assume -lc should be added - archive_cmds_need_lc=yes - -- if test yes,yes = "$GCC,$enable_shared"; then -+ if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. -@@ -10546,14 +10218,14 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 - $as_echo_n "checking dynamic linker characteristics... " >&6; } - --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - case $host_os in -- darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; -- *) lt_awk_arg='/^libraries:/' ;; -+ darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; -+ *) lt_awk_arg="/^libraries:/" ;; - esac - case $host_os in -- mingw* | cegcc*) lt_sed_strip_eq='s|=\([A-Za-z]:\)|\1|g' ;; -- *) lt_sed_strip_eq='s|=/|/|g' ;; -+ mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;; -+ *) lt_sed_strip_eq="s,=/,/,g" ;; - esac - lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` - case $lt_search_path_spec in -@@ -10569,35 +10241,28 @@ - ;; - esac - # Ok, now we have the path, separated by spaces, we can step through it -- # and add multilib dir if necessary... -+ # and add multilib dir if necessary. - lt_tmp_lt_search_path_spec= -- lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` -- # ...but if some path component already ends with the multilib dir we assume -- # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). 
-- case "$lt_multi_os_dir; $lt_search_path_spec " in -- "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) -- lt_multi_os_dir= -- ;; -- esac -+ lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` - for lt_sys_path in $lt_search_path_spec; do -- if test -d "$lt_sys_path$lt_multi_os_dir"; then -- lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" -- elif test -n "$lt_multi_os_dir"; then -+ if test -d "$lt_sys_path/$lt_multi_os_dir"; then -+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" -+ else - test -d "$lt_sys_path" && \ - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" - fi - done - lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' --BEGIN {RS = " "; FS = "/|\n";} { -- lt_foo = ""; -- lt_count = 0; -+BEGIN {RS=" "; FS="/|\n";} { -+ lt_foo=""; -+ lt_count=0; - for (lt_i = NF; lt_i > 0; lt_i--) { - if ($lt_i != "" && $lt_i != ".") { - if ($lt_i == "..") { - lt_count++; - } else { - if (lt_count == 0) { -- lt_foo = "/" $lt_i lt_foo; -+ lt_foo="/" $lt_i lt_foo; - } else { - lt_count--; - } -@@ -10611,7 +10276,7 @@ - # for these hosts. - case $host_os in - mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ -- $SED 's|/\([A-Za-z]:\)|\1|g'` ;; -+ $SED 's,/\([A-Za-z]:\),\1,g'` ;; - esac - sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` - else -@@ -10620,7 +10285,7 @@ - library_names_spec= - libname_spec='lib$name' - soname_spec= --shrext_cmds=.so -+shrext_cmds=".so" - postinstall_cmds= - postuninstall_cmds= - finish_cmds= -@@ -10637,16 +10302,14 @@ - # flags to be left without arguments - need_version=unknown - -- -- - case $host_os in - aix3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname.a' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. -- soname_spec='$libname$release$shared_ext$major' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - - aix[4-9]*) -@@ -10654,91 +10317,41 @@ - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes -- if test ia64 = "$host_cpu"; then -+ if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 -- library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with -- # the line '#! .'. This would cause the generated library to -- # depend on '.', always an invalid library. This was fixed in -+ # the line `#! .'. This would cause the generated library to -+ # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. 
- case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' -- echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then -+ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac -- # Using Import Files as archive members, it is possible to support -- # filename-based versioning of shared library archives on AIX. While -- # this would work for both with and without runtime linking, it will -- # prevent static linking of such archives. So we do filename-based -- # shared library versioning with .so extension only, which is used -- # when both runtime linking and shared linking is enabled. -- # Unfortunately, runtime linking may impact performance, so we do -- # not want this to be the default eventually. Also, we use the -- # versioned .so libs for executables only if there is the -brtl -- # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. -- # To allow for filename-based versioning support, we need to create -- # libNAME.so.V as an archive file, containing: -- # *) an Import File, referring to the versioned filename of the -- # archive as well as the shared archive member, telling the -- # bitwidth (32 or 64) of that shared object, and providing the -- # list of exported symbols of that shared object, eventually -- # decorated with the 'weak' keyword -- # *) the shared object with the F_LOADONLY flag set, to really avoid -- # it being seen by the linker. -- # At run time we better use the real file rather than another symlink, -- # but for link time we create the symlink libNAME.so -> libNAME.so.V -- -- case $with_aix_soname,$aix_use_runtimelinking in -- # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct -+ # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. -- aix,yes) # traditional libtool -- dynamic_linker='AIX unversionable lib.so' -+ if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib<name>.so - # instead of lib<name>.a to let people know that these are not - # typical AIX shared libraries. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- ;; -- aix,no) # traditional AIX only -- dynamic_linker='AIX lib.a(lib.so.V)' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- ;; -- svr4,*) # full svr4 only -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # We do not specify a path in Import Files, so LIBPATH fires. 
-- shlibpath_overrides_runpath=yes -- ;; -- *,yes) # both, prefer svr4 -- dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" -- library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' -- # unpreferred sharedlib libNAME.a needs extra handling -- postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' -- postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' -- # We do not specify a path in Import Files, so LIBPATH fires. -- shlibpath_overrides_runpath=yes -- ;; -- *,no) # both, prefer aix -- dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" -- library_names_spec='$libname$release.a $libname.a' -- soname_spec='$libname$release$shared_ext$major' -- # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling -- postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' -- postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' -- ;; -- esac -+ library_names_spec='${libname}${release}.a $libname.a' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ fi - shlibpath_var=LIBPATH - fi - ;; -@@ -10748,18 +10361,18 @@ - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
-- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' -+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - - beos*) -- library_names_spec='$libname$shared_ext' -+ library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; -@@ -10767,8 +10380,8 @@ - bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" -@@ -10780,7 +10393,7 @@ - - cygwin* | mingw* | pw32* | cegcc*) - version_type=windows -- shrext_cmds=.dll -+ shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - -@@ -10789,8 +10402,8 @@ - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ -@@ -10806,17 +10419,17 @@ - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' -- soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' -- library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' -@@ -10825,8 +10438,8 @@ - *,cl*) - # Native MSVC - libname_spec='$name' -- soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' -- library_names_spec='$libname.dll.lib' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) -@@ -10853,7 +10466,7 @@ - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) -- sys_lib_search_path_spec=$LIB -+ sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -@@ -10866,8 +10479,8 @@ - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' -@@ -10880,7 +10493,7 @@ - - *) - # Assume MSVC wrapper -- library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' -+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac -@@ -10893,8 +10506,8 @@ - version_type=darwin - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$major$shared_ext' -+ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' -+ soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' -@@ -10907,8 +10520,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -10926,13 +10539,12 @@ - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac -@@ -10957,15 +10569,26 @@ - esac - ;; - -+gnu*) -+ version_type=linux # correct to gnu/linux during the next big refactor -+ need_lib_prefix=no -+ need_version=no -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ shlibpath_var=LD_LIBRARY_PATH -+ shlibpath_overrides_runpath=no -+ hardcode_into_libs=yes -+ ;; -+ - haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH -- shlibpath_overrides_runpath=no -+ shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; -@@ -10983,15 +10606,14 @@ - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
-- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -- if test 32 = "$HPUX_IA64_MODE"; then -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux32 - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" -- sys_lib_dlsearch_path_spec=/usr/lib/hpux64 - fi -+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' -@@ -10999,8 +10621,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; -@@ -11009,8 +10631,8 @@ - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
-@@ -11023,8 +10645,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -11035,7 +10657,7 @@ - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) -- if test yes = "$lt_cv_prog_gnu_ld"; then -+ if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix -@@ -11043,8 +10665,8 @@ - esac - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= -@@ -11063,8 +10685,8 @@ - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no -- sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" -- sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" -+ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" -+ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -@@ -11073,33 +10695,13 @@ - dynamic_linker=no - ;; - --linux*android*) -- version_type=none # Android doesn't support versioned libraries. -- need_lib_prefix=no -- need_version=no -- library_names_spec='$libname$release$shared_ext' -- soname_spec='$libname$release$shared_ext' -- finish_cmds= -- shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -- -- # This implies no fast_install, which is unacceptable. -- # Some rework will be needed to allow for fast_install -- # before this can be enabled. -- hardcode_into_libs=yes -- -- dynamic_linker='Android linker' -- # Don't embed -rpath directories since the linker doesn't support them. -- hardcode_libdir_flag_spec='-L$libdir' -- ;; -- - # This must be glibc/ELF. --linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) -+linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no -@@ -11143,15 +10745,14 @@ - # before this can be enabled. - hardcode_into_libs=yes - -- # Ideally, we could use ldconfig to report *all* directores which are -- # searched for libraries, however this is still not possible. 
Aside from not -- # being certain /sbin/ldconfig is available, command -- # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, -- # even though it is searched at run-time. Try to do the best guess by -- # appending ld.so.conf contents (and includes) to the search path. -+ # Add ABI-specific directories to the system library path. -+ sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" -+ -+ # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` -- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" -+ sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" -+ - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on -@@ -11168,12 +10769,12 @@ - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH -@@ -11183,7 +10784,7 @@ - - newsos6) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; -@@ -11192,68 +10793,58 @@ - version_type=qnx - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - --openbsd* | bitrig*) -+openbsd*) - version_type=sunos -- sys_lib_dlsearch_path_spec=/usr/lib -+ sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no -- if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then -- need_version=no -- else -- need_version=yes -- fi -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
-+ case $host_os in -+ openbsd3.3 | openbsd3.3.*) need_version=yes ;; -+ *) need_version=no ;; -+ esac -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH -- shlibpath_overrides_runpath=yes -+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ case $host_os in -+ openbsd2.[89] | openbsd2.[89].*) -+ shlibpath_overrides_runpath=no -+ ;; -+ *) -+ shlibpath_overrides_runpath=yes -+ ;; -+ esac -+ else -+ shlibpath_overrides_runpath=yes -+ fi - ;; - - os2*) - libname_spec='$name' -- version_type=windows -- shrext_cmds=.dll -- need_version=no -+ shrext_cmds=".dll" - need_lib_prefix=no -- # OS/2 can only load a DLL with a base name of 8 characters or less. -- soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; -- v=$($ECHO $release$versuffix | tr -d .-); -- n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); -- $ECHO $n$v`$shared_ext' -- library_names_spec='${libname}_dll.$libext' -+ library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' -- shlibpath_var=BEGINLIBPATH -- sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -- postinstall_cmds='base_file=`basename \$file`~ -- dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ -- dldir=$destdir/`dirname \$dlpath`~ -- test -d \$dldir || mkdir -p \$dldir~ -- $install_prog $dir/$dlname \$dldir/$dlname~ -- chmod a+x \$dldir/$dlname~ -- if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then -- eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; -- fi' -- postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ -- dlpath=$dir/\$dldll~ -- $RM \$dlpath' -+ shlibpath_var=LIBPATH - ;; - - osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no -- soname_spec='$libname$release$shared_ext$major' -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ soname_spec='${libname}${release}${shared_ext}$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" -- sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec -+ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - - rdos*) -@@ -11264,8 +10855,8 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -@@ -11275,11 +10866,11 @@ - - sunos4*) - version_type=sunos -- library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes -- if test yes = "$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes -@@ -11287,8 +10878,8 @@ - - sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) -@@ -11309,24 +10900,24 @@ - ;; - - sysv4*MP*) -- if test -d /usr/nec; then -+ if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext' -- soname_spec='$libname$shared_ext.$major' -+ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' -+ soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) -- version_type=sco -+ version_type=freebsd-elf - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes -- if test yes = 
"$with_gnu_ld"; then -+ if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' -@@ -11344,7 +10935,7 @@ - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes -@@ -11352,8 +10943,8 @@ - - uts4*) - version_type=linux # correct to gnu/linux during the next big refactor -- library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' -- soname_spec='$libname$release$shared_ext$major' -+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' -+ soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -@@ -11363,35 +10954,20 @@ - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 - $as_echo "$dynamic_linker" >&6; } --test no = "$dynamic_linker" && can_build_shared=no -+test "$dynamic_linker" = no && can_build_shared=no - - variables_saved_for_relink="PATH $shlibpath_var $runpath_var" --if test yes = "$GCC"; then -+if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" - fi - --if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then -- sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec -+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then -+ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" - fi -- --if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then -- sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec -+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then -+ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" - fi - --# remember unaugmented sys_lib_dlsearch_path content for libtool script decls... --configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec -- --# ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code --func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" -- --# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool --configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH -- -- -- -- -- -- - - - -@@ -11488,15 +11064,15 @@ - hardcode_action= - if test -n "$hardcode_libdir_flag_spec" || - test -n "$runpath_var" || -- test yes = "$hardcode_automatic"; then -+ test "X$hardcode_automatic" = "Xyes" ; then - - # We can hardcode non-existent directories. -- if test no != "$hardcode_direct" && -+ if test "$hardcode_direct" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one -- ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, )" && -- test no != "$hardcode_minus_L"; then -+ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && -+ test "$hardcode_minus_L" != no; then - # Linking always hardcodes the temporary library directory. 
- hardcode_action=relink - else -@@ -11511,12 +11087,12 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 - $as_echo "$hardcode_action" >&6; } - --if test relink = "$hardcode_action" || -- test yes = "$inherit_rpath"; then -+if test "$hardcode_action" = relink || -+ test "$inherit_rpath" = yes; then - # Fast installation is not supported - enable_fast_install=no --elif test yes = "$shlibpath_overrides_runpath" || -- test no = "$enable_shared"; then -+elif test "$shlibpath_overrides_runpath" = yes || -+ test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless - fi -@@ -11526,7 +11102,7 @@ - - - -- if test yes != "$enable_dlopen"; then -+ if test "x$enable_dlopen" != xyes; then - enable_dlopen=unknown - enable_dlopen_self=unknown - enable_dlopen_self_static=unknown -@@ -11536,23 +11112,23 @@ - - case $host_os in - beos*) -- lt_cv_dlopen=load_add_on -+ lt_cv_dlopen="load_add_on" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ;; - - mingw* | pw32* | cegcc*) -- lt_cv_dlopen=LoadLibrary -+ lt_cv_dlopen="LoadLibrary" - lt_cv_dlopen_libs= - ;; - - cygwin*) -- lt_cv_dlopen=dlopen -+ lt_cv_dlopen="dlopen" - lt_cv_dlopen_libs= - ;; - - darwin*) -- # if libdl is installed we need to link against it -+ # if libdl is installed we need to link against it - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 - $as_echo_n "checking for dlopen in -ldl... " >&6; } - if ${ac_cv_lib_dl_dlopen+:} false; then : -@@ -11590,10 +11166,10 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 - $as_echo "$ac_cv_lib_dl_dlopen" >&6; } - if test "x$ac_cv_lib_dl_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl -+ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" - else - -- lt_cv_dlopen=dyld -+ lt_cv_dlopen="dyld" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - -@@ -11601,18 +11177,10 @@ - - ;; - -- tpf*) -- # Don't try to run any link tests for TPF. We know it's impossible -- # because TPF is a cross-compiler, and we know how we open DSOs. -- lt_cv_dlopen=dlopen -- lt_cv_dlopen_libs= -- lt_cv_dlopen_self=no -- ;; -- - *) - ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" - if test "x$ac_cv_func_shl_load" = xyes; then : -- lt_cv_dlopen=shl_load -+ lt_cv_dlopen="shl_load" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 - $as_echo_n "checking for shl_load in -ldld... " >&6; } -@@ -11651,11 +11219,11 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 - $as_echo "$ac_cv_lib_dld_shl_load" >&6; } - if test "x$ac_cv_lib_dld_shl_load" = xyes; then : -- lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld -+ lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" - else - ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" - if test "x$ac_cv_func_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen -+ lt_cv_dlopen="dlopen" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 - $as_echo_n "checking for dlopen in -ldl... " >&6; } -@@ -11694,7 +11262,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 - $as_echo "$ac_cv_lib_dl_dlopen" >&6; } - if test "x$ac_cv_lib_dl_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl -+ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 - $as_echo_n "checking for dlopen in -lsvld... 
" >&6; } -@@ -11733,7 +11301,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 - $as_echo "$ac_cv_lib_svld_dlopen" >&6; } - if test "x$ac_cv_lib_svld_dlopen" = xyes; then : -- lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld -+ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" - else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 - $as_echo_n "checking for dld_link in -ldld... " >&6; } -@@ -11772,7 +11340,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 - $as_echo "$ac_cv_lib_dld_dld_link" >&6; } - if test "x$ac_cv_lib_dld_dld_link" = xyes; then : -- lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld -+ lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" - fi - - -@@ -11793,21 +11361,21 @@ - ;; - esac - -- if test no = "$lt_cv_dlopen"; then -- enable_dlopen=no -- else -+ if test "x$lt_cv_dlopen" != xno; then - enable_dlopen=yes -+ else -+ enable_dlopen=no - fi - - case $lt_cv_dlopen in - dlopen) -- save_CPPFLAGS=$CPPFLAGS -- test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" -+ save_CPPFLAGS="$CPPFLAGS" -+ test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" - -- save_LDFLAGS=$LDFLAGS -+ save_LDFLAGS="$LDFLAGS" - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" - -- save_LIBS=$LIBS -+ save_LIBS="$LIBS" - LIBS="$lt_cv_dlopen_libs $LIBS" - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 -@@ -11815,7 +11383,7 @@ - if ${lt_cv_dlopen_self+:} false; then : - $as_echo_n "(cached) " >&6 - else -- if test yes = "$cross_compiling"; then : -+ if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self=cross - else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 -@@ -11862,9 +11430,9 @@ - # endif - #endif - --/* When -fvisibility=hidden is used, assume the code has been annotated -+/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ --#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) - int fnord () __attribute__((visibility("default"))); - #endif - -@@ -11894,7 +11462,7 @@ - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -- test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then -+ test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? - case x$lt_status in -@@ -11914,14 +11482,14 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 - $as_echo "$lt_cv_dlopen_self" >&6; } - -- if test yes = "$lt_cv_dlopen_self"; then -+ if test "x$lt_cv_dlopen_self" = xyes; then - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 - $as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } - if ${lt_cv_dlopen_self_static+:} false; then : - $as_echo_n "(cached) " >&6 - else -- if test yes = "$cross_compiling"; then : -+ if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self_static=cross - else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 -@@ -11968,9 +11536,9 @@ - # endif - #endif - --/* When -fvisibility=hidden is used, assume the code has been annotated -+/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ --#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) - int fnord () __attribute__((visibility("default"))); - #endif - -@@ -12000,7 +11568,7 @@ - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -- test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then -+ test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? - case x$lt_status in -@@ -12021,9 +11589,9 @@ - $as_echo "$lt_cv_dlopen_self_static" >&6; } - fi - -- CPPFLAGS=$save_CPPFLAGS -- LDFLAGS=$save_LDFLAGS -- LIBS=$save_LIBS -+ CPPFLAGS="$save_CPPFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+ LIBS="$save_LIBS" - ;; - esac - -@@ -12067,7 +11635,7 @@ - # FIXME - insert some real tests, host_os isn't really good enough - case $host_os in - darwin*) -- if test -n "$STRIP"; then -+ if test -n "$STRIP" ; then - striplib="$STRIP -x" - old_striplib="$STRIP -S" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -@@ -12095,7 +11663,7 @@ - - - -- # Report what library types will actually be built -+ # Report which library types will actually be built - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 - $as_echo_n "checking if libtool supports shared libraries... " >&6; } - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 -@@ -12103,13 +11671,13 @@ - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 - $as_echo_n "checking whether to build shared libraries... " >&6; } -- test no = "$can_build_shared" && enable_shared=no -+ test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) -- test yes = "$enable_shared" && enable_static=no -+ test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' -@@ -12117,12 +11685,8 @@ - ;; - - aix[4-9]*) -- if test ia64 != "$host_cpu"; then -- case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in -- yes,aix,yes) ;; # shared object as lib.so file only -- yes,svr4,*) ;; # shared object as lib.so archive member only -- yes,*) enable_static=no ;; # shared object in lib.a archive as well -- esac -+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then -+ test "$enable_shared" = yes && enable_static=no - fi - ;; - esac -@@ -12132,7 +11696,7 @@ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 - $as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. 
-- test yes = "$enable_shared" || enable_static=yes -+ test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 - $as_echo "$enable_static" >&6; } - -@@ -12146,7 +11710,7 @@ - ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' - ac_compiler_gnu=$ac_cv_c_compiler_gnu - --CC=$lt_save_CC -+CC="$lt_save_CC" - - - -@@ -16842,7 +16406,6 @@ - enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' - pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' - enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' --shared_archive_member_spec='`$ECHO "$shared_archive_member_spec" | $SED "$delay_single_quote_subst"`' - SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' - ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' - PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' -@@ -16892,13 +16455,10 @@ - GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' --lt_cv_sys_global_symbol_to_import='`$ECHO "$lt_cv_sys_global_symbol_to_import" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' --lt_cv_nm_interface='`$ECHO "$lt_cv_nm_interface" | $SED "$delay_single_quote_subst"`' - nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' - lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' --lt_cv_truncate_bin='`$ECHO "$lt_cv_truncate_bin" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' -@@ -16963,8 +16523,7 @@ - finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' - hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' - sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' --configure_time_dlsearch_path='`$ECHO "$configure_time_dlsearch_path" | $SED "$delay_single_quote_subst"`' --configure_time_lt_sys_library_path='`$ECHO "$configure_time_lt_sys_library_path" | $SED "$delay_single_quote_subst"`' -+sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`' - hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' - enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' - enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' -@@ -17015,12 +16574,9 @@ - compiler \ - lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ --lt_cv_sys_global_symbol_to_import \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ --lt_cv_nm_interface \ - nm_file_list_spec \ --lt_cv_truncate_bin \ - lt_prog_compiler_no_builtin_flag \ - lt_prog_compiler_pic \ - lt_prog_compiler_wl \ 
-@@ -17055,7 +16611,7 @@ - striplib; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -17082,11 +16638,10 @@ - postuninstall_cmds \ - finish_cmds \ - sys_lib_search_path_spec \ --configure_time_dlsearch_path \ --configure_time_lt_sys_library_path; do -+sys_lib_dlsearch_path_spec; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) -- eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes -+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" -@@ -17095,16 +16650,19 @@ - done - - ac_aux_dir='$ac_aux_dir' -+xsi_shell='$xsi_shell' -+lt_shell_append='$lt_shell_append' - --# See if we are running on zsh, and set the options that allow our -+# See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes INIT. --if test -n "\${ZSH_VERSION+set}"; then -+if test -n "\${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - - - PACKAGE='$PACKAGE' - VERSION='$VERSION' -+ TIMESTAMP='$TIMESTAMP' - RM='$RM' - ofile='$ofile' - -@@ -17817,52 +17375,55 @@ - ;; - "libtool":C) - -- # See if we are running on zsh, and set the options that allow our -+ # See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes. -- if test -n "${ZSH_VERSION+set}"; then -+ if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - -- cfgfile=${ofile}T -+ cfgfile="${ofile}T" - trap "$RM \"$cfgfile\"; exit 1" 1 2 15 - $RM "$cfgfile" - - cat <<_LT_EOF >> "$cfgfile" - #! $SHELL --# Generated automatically by $as_me ($PACKAGE) $VERSION --# NOTE: Changes made to this file will be lost: look at ltmain.sh. -- --# Provide generalized library-building support services. --# Written by Gordon Matzigkeit, 1996 - --# Copyright (C) 2014 Free Software Foundation, Inc. --# This is free software; see the source for copying conditions. There is NO --# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -- --# GNU Libtool is free software; you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation; either version 2 of of the License, or --# (at your option) any later version. -+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. -+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION -+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: -+# NOTE: Changes made to this file will be lost: look at ltmain.sh. -+# -+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -+# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -+# Foundation, Inc. -+# Written by Gordon Matzigkeit, 1996 -+# -+# This file is part of GNU Libtool. 
- # --# As a special exception to the GNU General Public License, if you --# distribute this file as part of a program or library that is built --# using GNU Libtool, you may include this file under the same --# distribution terms that you use for the rest of that program. -+# GNU Libtool is free software; you can redistribute it and/or -+# modify it under the terms of the GNU General Public License as -+# published by the Free Software Foundation; either version 2 of -+# the License, or (at your option) any later version. - # --# GNU Libtool is distributed in the hope that it will be useful, but --# WITHOUT ANY WARRANTY; without even the implied warranty of -+# As a special exception to the GNU General Public License, -+# if you distribute this file as part of a program or library that -+# is built using GNU Libtool, you may include this file under the -+# same distribution terms that you use for the rest of that program. -+# -+# GNU Libtool is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of - # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - # GNU General Public License for more details. - # - # You should have received a copy of the GNU General Public License --# along with this program. If not, see <http://www.gnu.org/licenses/>. -+# along with GNU Libtool; see the file COPYING. If not, a copy -+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or -+# obtained by writing to the Free Software Foundation, Inc., -+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - - - # The names of the tagged configurations supported by this script. --available_tags='' -- --# Configured defaults for sys_lib_dlsearch_path munging. --: \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} -+available_tags="" - - # ### BEGIN LIBTOOL CONFIG - -@@ -17882,9 +17443,6 @@ - # Whether or not to optimize for fast installation. - fast_install=$enable_fast_install - --# Shared archive member basename,for filename based shared library versioning on AIX. --shared_archive_member_spec=$shared_archive_member_spec -- - # Shell to use when invoking shell scripts. - SHELL=$lt_SHELL - -@@ -18002,27 +17560,18 @@ - # Transform the output of nm in a proper C declaration. - global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl - --# Transform the output of nm into a list of symbols to manually relocate. --global_symbol_to_import=$lt_lt_cv_sys_global_symbol_to_import -- - # Transform the output of nm in a C name address pair. - global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - --# The name lister interface. --nm_interface=$lt_lt_cv_nm_interface -- - # Specify filename containing input files for \$NM. - nm_file_list_spec=$lt_nm_file_list_spec - --# The root where to search for dependent libraries,and where our libraries should be installed. -+# The root where to search for dependent libraries,and in which our libraries should be installed. - lt_sysroot=$lt_sysroot - --# Command to truncate a binary pipe. --lt_truncate_bin=$lt_lt_cv_truncate_bin -- - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -18113,11 +17662,8 @@ - # Compile-time system search path for libraries. 
- sys_lib_search_path_spec=$lt_sys_lib_search_path_spec - --# Detected run-time system search path for libraries. --sys_lib_dlsearch_path_spec=$lt_configure_time_dlsearch_path -- --# Explicit LT_SYS_LIBRARY_PATH set during ./configure time. --configure_time_lt_sys_library_path=$lt_configure_time_lt_sys_library_path -+# Run-time system search path for libraries. -+sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec - - # Whether dlopen is supported. - dlopen_support=$enable_dlopen -@@ -18210,13 +17756,13 @@ - # Whether we need a single "-rpath" flag with a separated argument. - hardcode_libdir_separator=$lt_hardcode_libdir_separator - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary. - hardcode_direct=$hardcode_direct - --# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes -+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes - # DIR into the resulting binary and the resulting library dependency is --# "absolute",i.e impossible to change by setting \$shlibpath_var if the -+# "absolute",i.e impossible to change by setting \${shlibpath_var} if the - # library is relocated. - hardcode_direct_absolute=$hardcode_direct_absolute - -@@ -18268,72 +17814,13 @@ - - _LT_EOF - -- cat <<'_LT_EOF' >> "$cfgfile" -- --# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE -- --# func_munge_path_list VARIABLE PATH --# ----------------------------------- --# VARIABLE is name of variable containing _space_ separated list of --# directories to be munged by the contents of PATH, which is string --# having a format: --# "DIR[:DIR]:" --# string "DIR[ DIR]" will be prepended to VARIABLE --# ":DIR[:DIR]" --# string "DIR[ DIR]" will be appended to VARIABLE --# "DIRP[:DIRP]::[DIRA:]DIRA" --# string "DIRP[ DIRP]" will be prepended to VARIABLE and string --# "DIRA[ DIRA]" will be appended to VARIABLE --# "DIR[:DIR]" --# VARIABLE will be replaced by "DIR[ DIR]" --func_munge_path_list () --{ -- case x$2 in -- x) -- ;; -- *:) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" -- ;; -- x:*) -- eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- *::*) -- eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" -- eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" -- ;; -- *) -- eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" -- ;; -- esac --} -- -- --# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. --func_cc_basename () --{ -- for cc_temp in $*""; do -- case $cc_temp in -- compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -- distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -- \-*) ;; -- *) break;; -- esac -- done -- func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` --} -- -- --# ### END FUNCTIONS SHARED WITH CONFIGURE -- --_LT_EOF -- - case $host_os in - aix3*) - cat <<\_LT_EOF >> "$cfgfile" - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. 
--if test set != "${COLLECT_NAMES+set}"; then -+if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi -@@ -18342,7 +17829,7 @@ - esac - - --ltmain=$ac_aux_dir/ltmain.sh -+ltmain="$ac_aux_dir/ltmain.sh" - - - # We use sed instead of cat because bash on DJGPP gets confused if -@@ -18352,6 +17839,165 @@ - sed '$q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? 
|| _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ - mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/include/config.h.in psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/include/config.h.in ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/include/config.h.in 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/include/config.h.in 2020-07-16 10:48:35.486141739 +0200 -@@ -266,7 +266,8 @@ - /* Define which x86 cycle counter to use */ - #undef LINUX86_CYCLE_RDTSCP - --/* Define to the sub-directory where libtool stores uninstalled libraries. */ -+/* Define to the sub-directory in which libtool stores uninstalled libraries. -+ */ - #undef LT_OBJDIR - - /* Define if use MMAP shared memory */ -@@ -311,6 +312,9 @@ - /* Define if usleep needs a declaration */ - #undef NEEDS_USLEEP_DECL - -+/* Define to 1 if your C compiler doesn't accept -c and -o together. */ -+#undef NO_MINUS_C_MINUS_O -+ - /* Name of package */ - #undef PACKAGE - -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/Makefile.in psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/Makefile.in ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/mpl/Makefile.in 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/mpl/Makefile.in 2020-07-16 10:48:35.420135067 +0200 -@@ -64,17 +64,7 @@ - - - VPATH = @srcdir@ --am__is_gnu_make = { \ -- if test -z '$(MAKELEVEL)'; then \ -- false; \ -- elif test -n '$(MAKE_HOST)'; then \ -- true; \ -- elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ -- true; \ -- else \ -- false; \ -- fi; \ --} -+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' - am__make_running_with_option = \ - case $${target_option-} in \ - ?) ;; \ -@@ -137,6 +127,26 @@ - POST_UNINSTALL = : - build_triplet = @build@ - host_triplet = @host@ -+DIST_COMMON = $(srcdir)/src/Makefile.mk $(srcdir)/src/bt/Makefile.mk \ -+ $(srcdir)/src/dbg/Makefile.mk $(srcdir)/src/env/Makefile.mk \ -+ $(srcdir)/src/mem/Makefile.mk $(srcdir)/src/msg/Makefile.mk \ -+ $(srcdir)/src/sock/Makefile.mk $(srcdir)/src/str/Makefile.mk \ -+ $(srcdir)/src/thread/Makefile.mk \ -+ $(srcdir)/src/timer/Makefile.mk $(srcdir)/src/shm/Makefile.mk \ -+ $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ -+ $(top_srcdir)/configure $(am__configure_deps) \ -+ $(top_srcdir)/include/config.h.in $(srcdir)/localdefs.in \ -+ $(top_srcdir)/include/mpl_timer.h.in \ -+ $(top_srcdir)/confdb/depcomp $(am__include_HEADERS_DIST) \ -+ $(am__noinst_HEADERS_DIST) $(top_srcdir)/confdb/test-driver \ -+ confdb/ar-lib confdb/compile confdb/config.guess \ -+ confdb/config.rpath confdb/config.sub confdb/depcomp \ -+ confdb/install-sh confdb/missing confdb/ltmain.sh \ -+ $(top_srcdir)/confdb/ar-lib $(top_srcdir)/confdb/compile \ -+ $(top_srcdir)/confdb/config.guess \ -+ $(top_srcdir)/confdb/config.sub \ -+ $(top_srcdir)/confdb/install-sh $(top_srcdir)/confdb/ltmain.sh \ -+ $(top_srcdir)/confdb/missing - TESTS = $(am__EXEEXT_1) - check_PROGRAMS = $(am__EXEEXT_1) - subdir = . 
-@@ -152,9 +162,6 @@ - $(top_srcdir)/configure.ac - am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) --DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \ -- $(am__configure_deps) $(am__include_HEADERS_DIST) \ -- $(am__noinst_HEADERS_DIST) $(am__DIST_COMMON) - am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ - configure.lineno config.status.lineno - mkinstalldirs = $(install_sh) -d -@@ -507,22 +514,6 @@ - TEST_LOG_DRIVER = $(SHELL) $(top_srcdir)/confdb/test-driver - TEST_LOG_COMPILE = $(TEST_LOG_COMPILER) $(AM_TEST_LOG_FLAGS) \ - $(TEST_LOG_FLAGS) --am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/localdefs.in \ -- $(srcdir)/src/Makefile.mk $(srcdir)/src/bt/Makefile.mk \ -- $(srcdir)/src/dbg/Makefile.mk $(srcdir)/src/env/Makefile.mk \ -- $(srcdir)/src/mem/Makefile.mk $(srcdir)/src/msg/Makefile.mk \ -- $(srcdir)/src/shm/Makefile.mk $(srcdir)/src/sock/Makefile.mk \ -- $(srcdir)/src/str/Makefile.mk $(srcdir)/src/thread/Makefile.mk \ -- $(srcdir)/src/timer/Makefile.mk $(top_srcdir)/confdb/ar-lib \ -- $(top_srcdir)/confdb/compile $(top_srcdir)/confdb/config.guess \ -- $(top_srcdir)/confdb/config.sub $(top_srcdir)/confdb/depcomp \ -- $(top_srcdir)/confdb/install-sh $(top_srcdir)/confdb/ltmain.sh \ -- $(top_srcdir)/confdb/missing $(top_srcdir)/confdb/test-driver \ -- $(top_srcdir)/include/config.h.in \ -- $(top_srcdir)/include/mpl_timer.h.in confdb/ar-lib \ -- confdb/compile confdb/config.guess confdb/config.rpath \ -- confdb/config.sub confdb/depcomp confdb/install-sh \ -- confdb/ltmain.sh confdb/missing - DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) - distdir = $(PACKAGE)-$(VERSION) - top_distdir = $(distdir) -@@ -580,7 +571,6 @@ - LIPO = @LIPO@ - LN_S = @LN_S@ - LTLIBOBJS = @LTLIBOBJS@ --LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ - MAKEINFO = @MAKEINFO@ - MANIFEST_TOOL = @MANIFEST_TOOL@ - MKDIR_P = @MKDIR_P@ -@@ -732,6 +722,7 @@ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign Makefile -+.PRECIOUS: Makefile - Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' in \ - *config.status*) \ -@@ -741,7 +732,7 @@ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__maybe_remake_depfiles);; \ - esac; --$(srcdir)/src/Makefile.mk $(srcdir)/src/bt/Makefile.mk $(srcdir)/src/dbg/Makefile.mk $(srcdir)/src/env/Makefile.mk $(srcdir)/src/mem/Makefile.mk $(srcdir)/src/msg/Makefile.mk $(srcdir)/src/sock/Makefile.mk $(srcdir)/src/str/Makefile.mk $(srcdir)/src/thread/Makefile.mk $(srcdir)/src/timer/Makefile.mk $(srcdir)/src/shm/Makefile.mk $(am__empty): -+$(srcdir)/src/Makefile.mk $(srcdir)/src/bt/Makefile.mk $(srcdir)/src/dbg/Makefile.mk $(srcdir)/src/env/Makefile.mk $(srcdir)/src/mem/Makefile.mk $(srcdir)/src/msg/Makefile.mk $(srcdir)/src/sock/Makefile.mk $(srcdir)/src/str/Makefile.mk $(srcdir)/src/thread/Makefile.mk $(srcdir)/src/timer/Makefile.mk $(srcdir)/src/shm/Makefile.mk: - - $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - $(SHELL) ./config.status --recheck -@@ -753,8 +744,8 @@ - $(am__aclocal_m4_deps): - - include/config.h: include/stamp-h1 -- @test -f $@ || rm -f include/stamp-h1 -- @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) include/stamp-h1 -+ @if test ! -f $@; then rm -f include/stamp-h1; else :; fi -+ @if test ! 
-f $@; then $(MAKE) $(AM_MAKEFLAGS) include/stamp-h1; else :; fi - - include/stamp-h1: $(top_srcdir)/include/config.h.in $(top_builddir)/config.status - @rm -f include/stamp-h1 -@@ -1179,7 +1170,7 @@ - if test -n "$$am__remaking_logs"; then \ - echo "fatal: making $(TEST_SUITE_LOG): possible infinite" \ - "recursion detected" >&2; \ -- elif test -n "$$redo_logs"; then \ -+ else \ - am__remaking_logs=yes $(MAKE) $(AM_MAKEFLAGS) $$redo_logs; \ - fi; \ - if $(am__make_dryrun); then :; else \ -@@ -1355,7 +1346,7 @@ - ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ - || chmod -R a+r "$(distdir)" - dist-gzip: distdir -- tardir=$(distdir) && $(am__tar) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).tar.gz -+ tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__post_remove_distdir) - - dist-bzip2: distdir -@@ -1371,17 +1362,11 @@ - $(am__post_remove_distdir) - - dist-tarZ: distdir -- @echo WARNING: "Support for distribution archives compressed with" \ -- "legacy program 'compress' is deprecated." >&2 -- @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 - tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z - $(am__post_remove_distdir) - - dist-shar: distdir -- @echo WARNING: "Support for shar distribution archives is" \ -- "deprecated." >&2 -- @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 -- shar $(distdir) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).shar.gz -+ shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz - $(am__post_remove_distdir) - - dist-zip: distdir -@@ -1399,7 +1384,7 @@ - distcheck: dist - case '$(DIST_ARCHIVES)' in \ - *.tar.gz*) \ -- eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).tar.gz | $(am__untar) ;;\ -+ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ - *.tar.bz2*) \ - bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ - *.tar.lz*) \ -@@ -1409,23 +1394,22 @@ - *.tar.Z*) \ - uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ - *.shar.gz*) \ -- eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\ -+ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ - *.zip*) \ - unzip $(distdir).zip ;;\ - esac - chmod -R a-w $(distdir) - chmod u+w $(distdir) -- mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst -+ mkdir $(distdir)/_build $(distdir)/_inst - chmod a-w $(distdir) - test -d $(distdir)/_build || exit 0; \ - dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ - && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ - && am__cwd=`pwd` \ -- && $(am__cd) $(distdir)/_build/sub \ -- && ../../configure \ -+ && $(am__cd) $(distdir)/_build \ -+ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ - $(AM_DISTCHECK_CONFIGURE_FLAGS) \ - $(DISTCHECK_CONFIGURE_FLAGS) \ -- --srcdir=../.. 
--prefix="$$dc_install_base" \ - && $(MAKE) $(AM_MAKEFLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) dvi \ - && $(MAKE) $(AM_MAKEFLAGS) check \ -@@ -1699,8 +1683,6 @@ - recheck tags tags-am uninstall uninstall-am \ - uninstall-includeHEADERS uninstall-libLTLIBRARIES - --.PRECIOUS: Makefile -- - - # -------------------------------------------------------------------------- - .PHONY: coverage -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/test/Makefile.in psmpi-5.4.6-1/mpich2/src/mpi/romio/test/Makefile.in ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/test/Makefile.in 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/test/Makefile.in 2020-07-16 10:48:35.489259068 +0200 -@@ -24,17 +24,7 @@ - # will not be built by default, but it will participate in "make distclean" and - # friends. - --am__is_gnu_make = { \ -- if test -z '$(MAKELEVEL)'; then \ -- false; \ -- elif test -n '$(MAKE_HOST)'; then \ -- true; \ -- elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ -- true; \ -- else \ -- false; \ -- fi; \ --} -+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' - am__make_running_with_option = \ - case $${target_option-} in \ - ?) ;; \ -@@ -100,6 +90,11 @@ - noinst_PROGRAMS = $(am__EXEEXT_1) $(am__EXEEXT_3) - @BUILD_F77_TESTS_TRUE@am__append_1 = $(FTESTS) - subdir = test -+DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ -+ $(srcdir)/misc.c.in $(srcdir)/large_file.c.in \ -+ $(srcdir)/runtests.in $(srcdir)/fmisc.f.in \ -+ $(srcdir)/fcoll_test.f.in $(srcdir)/pfcoll_test.f.in \ -+ $(srcdir)/fperf.f.in $(top_srcdir)/confdb/depcomp README - ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 - am__aclocal_m4_deps = $(top_srcdir)/confdb/aclocal_cc.m4 \ - $(top_srcdir)/confdb/aclocal_check_visibility.m4 \ -@@ -114,7 +109,6 @@ - $(top_srcdir)/configure.ac - am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) --DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) - mkinstalldirs = $(install_sh) -d - CONFIG_HEADER = $(top_builddir)/adio/include/romioconf.h - CONFIG_CLEAN_FILES = misc.c large_file.c runtests fmisc.f fcoll_test.f \ -@@ -381,11 +375,6 @@ - done | $(am__uniquify_input)` - ETAGS = etags - CTAGS = ctags --am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/fcoll_test.f.in \ -- $(srcdir)/fmisc.f.in $(srcdir)/fperf.f.in \ -- $(srcdir)/large_file.c.in $(srcdir)/misc.c.in \ -- $(srcdir)/pfcoll_test.f.in $(srcdir)/runtests.in \ -- $(top_srcdir)/confdb/depcomp README - DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) - VPATH = @VPATH@ - ACLOCAL = @ACLOCAL@ -@@ -446,6 +435,7 @@ - HAVE_MPI_DARRAY_SUBARRAY = @HAVE_MPI_DARRAY_SUBARRAY@ - HAVE_MPI_INFO = @HAVE_MPI_INFO@ - HAVE_WEAK_SYMBOLS = @HAVE_WEAK_SYMBOLS@ -+IME_INSTALL_PATH = @IME_INSTALL_PATH@ - INSTALL = @INSTALL@ - INSTALL_DATA = @INSTALL_DATA@ - INSTALL_PROGRAM = @INSTALL_PROGRAM@ -@@ -461,7 +451,6 @@ - LL = @LL@ - LN_S = @LN_S@ - LTLIBOBJS = @LTLIBOBJS@ --LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ - MAINT = @MAINT@ - MAKE = @MAKE@ - MAKEINFO = @MAKEINFO@ -@@ -628,6 +617,7 @@ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign test/Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign test/Makefile -+.PRECIOUS: Makefile - Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' 
in \ - *config.status*) \ -@@ -1144,8 +1134,6 @@ - mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ - tags tags-am uninstall uninstall-am - --.PRECIOUS: Makefile -- - - # Tell versions [3.59,3.63) of GNU make to not export all variables. - # Otherwise a system limit (for SysV at least) may be exceeded. -diff -ruN psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/test-internal/Makefile.in psmpi-5.4.6-1/mpich2/src/mpi/romio/test-internal/Makefile.in ---- psmpi-5.4.6-1.orig/mpich2/src/mpi/romio/test-internal/Makefile.in 2020-06-17 13:57:56.000000000 +0200 -+++ psmpi-5.4.6-1/mpich2/src/mpi/romio/test-internal/Makefile.in 2020-07-16 10:48:35.487333588 +0200 -@@ -24,17 +24,7 @@ - # will not be built by default, but it will participate in "make distclean" and - # friends. - --am__is_gnu_make = { \ -- if test -z '$(MAKELEVEL)'; then \ -- false; \ -- elif test -n '$(MAKE_HOST)'; then \ -- true; \ -- elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ -- true; \ -- else \ -- false; \ -- fi; \ --} -+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' - am__make_running_with_option = \ - case $${target_option-} in \ - ?) ;; \ -@@ -99,6 +89,8 @@ - host_triplet = @host@ - noinst_PROGRAMS = $(am__EXEEXT_1) - subdir = test-internal -+DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ -+ $(top_srcdir)/confdb/depcomp - ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 - am__aclocal_m4_deps = $(top_srcdir)/confdb/aclocal_cc.m4 \ - $(top_srcdir)/confdb/aclocal_check_visibility.m4 \ -@@ -113,7 +105,6 @@ - $(top_srcdir)/configure.ac - am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) --DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) - mkinstalldirs = $(install_sh) -d - CONFIG_HEADER = $(top_builddir)/adio/include/romioconf.h - CONFIG_CLEAN_FILES = -@@ -201,7 +192,6 @@ - done | $(am__uniquify_input)` - ETAGS = etags - CTAGS = ctags --am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/confdb/depcomp - DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) - VPATH = @VPATH@ - ACLOCAL = @ACLOCAL@ -@@ -262,6 +252,7 @@ - HAVE_MPI_DARRAY_SUBARRAY = @HAVE_MPI_DARRAY_SUBARRAY@ - HAVE_MPI_INFO = @HAVE_MPI_INFO@ - HAVE_WEAK_SYMBOLS = @HAVE_WEAK_SYMBOLS@ -+IME_INSTALL_PATH = @IME_INSTALL_PATH@ - INSTALL = @INSTALL@ - INSTALL_DATA = @INSTALL_DATA@ - INSTALL_PROGRAM = @INSTALL_PROGRAM@ -@@ -277,7 +268,6 @@ - LL = @LL@ - LN_S = @LN_S@ - LTLIBOBJS = @LTLIBOBJS@ --LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ - MAINT = @MAINT@ - MAKE = @MAKE@ - MAKEINFO = @MAKEINFO@ -@@ -434,6 +424,7 @@ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign test-internal/Makefile'; \ - $(am__cd) $(top_srcdir) && \ - $(AUTOMAKE) --foreign test-internal/Makefile -+.PRECIOUS: Makefile - Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' in \ - *config.status*) \ -@@ -729,8 +720,6 @@ - mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ - tags tags-am uninstall uninstall-am - --.PRECIOUS: Makefile -- - - # Tell versions [3.59,3.63) of GNU make to not export all variables. - # Otherwise a system limit (for SysV at least) may be exceeded. 
diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb deleted file mode 100644 index a3ca2c49a73c12d264e999b7c22fa5c39da2c334..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb +++ /dev/null @@ -1,53 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-NVHPC-20.7-GCC-9.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.7-1-NVHPC-20.7-GCC-9.3.0.eb deleted file mode 100644 index 0fe539f7cfa08316a8366059ba50564df004459c..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-NVHPC-20.7-GCC-9.3.0.eb +++ /dev/null @@ -1,55 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '20.7-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. 
One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# Needed since nvfortran can't deal with --allow-shlib-undefined, but pgf77 can -preconfigopts += 'unset F77 && ' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-NVHPC-20.9-GCC-9.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.7-1-NVHPC-20.9-GCC-9.3.0.eb deleted file mode 100644 index 8fd4092d91311c53e42e768c3e870e2806a2f399..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-NVHPC-20.9-GCC-9.3.0.eb +++ /dev/null @@ -1,55 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '20.9-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -# Needed since nvfortran can't deal with --allow-shlib-undefined, but pgf77 can -preconfigopts = 'unset F77 && ' -preconfigopts += 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings/CUDA") ) then - load("mpi-settings/CUDA") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb b/Golden_Repo/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb deleted file mode 100644 index e4ee4e434d89b12fa933cec47881a3a2d909e5a8..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb +++ /dev/null @@ -1,54 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' -versionsuffix = '-mt' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. 
It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = True - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 739b3d9ce3aac6b722db1e4820a4cf9887395876..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,53 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. 
One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.8-1-NVHPC-21.1-GCC-9.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.8-1-NVHPC-21.1-GCC-9.3.0.eb deleted file mode 100644 index 7a1c7158799bcd2c3aa03247d47544e4286d041e..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.8-1-NVHPC-21.1-GCC-9.3.0.eb +++ /dev/null @@ -1,55 +0,0 @@ -name = 'psmpi' -version = '5.4.8-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '21.1-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -# Needed since nvfortran can't deal with --allow-shlib-undefined, but pgf77 can -preconfigopts = 'unset F77 && ' -preconfigopts += 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings/CUDA") ) then - load("mpi-settings/CUDA") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb deleted file mode 100644 index 02d51c0cd113f5587ce2666032b63538021556f5..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10'), - # Including CUDA here to trigger the hook to add the gpu property, and because it is actually needed - ('CUDA', local_cuda_ver, '', SYSTEM) -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# Needed due to changes in GCC10. See https://github.com/pmodels/mpich/issues/4300 -preconfigopts += 'export FCFLAGS="-fallow-argument-mismatch $FCFLAGS" && ' -preconfigopts += 'export FFLAGS="-fallow-argument-mismatch $FFLAGS" && ' - -threaded = False - -cuda = True - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-NVHPC-21.5-GCC-10.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.9-1-NVHPC-21.5-GCC-10.3.0.eb deleted file mode 100644 index 04e5b05a256d72b1849d3f3a025026210ae8557a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-NVHPC-21.5-GCC-10.3.0.eb +++ /dev/null @@ -1,54 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '21.5-GCC-10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10'), - # Including CUDA here to trigger the hook to add the gpu property, and because it is actually needed - ('CUDA', local_cuda_ver, '', SYSTEM) -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. 
One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = False - -cuda = True - -# We need this here since the hook does not consider the compiler toolchain when injecting these vars -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings/CUDA") ) then - load("mpi-settings/CUDA") -end -''' - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb b/Golden_Repo/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb deleted file mode 100644 index de59f84fb596cf0f7a944166e3e89b642bde07f1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb +++ /dev/null @@ -1,45 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' -versionsuffix = '-mt' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10'), - # Including CUDA here to trigger the hook to add the gpu property, and because it is actually needed - ('CUDA', local_cuda_ver, '', SYSTEM) -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = True - -cuda = True - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index f55d05c08ddf8dce65401d8f86aec7703e155bae..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10'), - # Including CUDA here to trigger the hook to add the gpu property, and because it is actually needed - ('CUDA', local_cuda_ver, '', SYSTEM) -] - -patches = [ - 'psmpi_shebang.patch', - 'psmpi-5.4.6-1_ime.patch' -] - -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ime+ufs --enable-romio' - -preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' - -threaded = False - -cuda = True - -moduleclass = 'mpi' diff --git a/Golden_Repo/p/psmpi/psmpi_shebang.patch b/Golden_Repo/p/psmpi/psmpi_shebang.patch deleted file mode 100644 index 4983f5995e39a086a06a99f6386a06932350e44a..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/psmpi/psmpi_shebang.patch +++ /dev/null @@ -1,36 +0,0 @@ -diff -ruN psmpi-5.2.0-1.old/mpich2/src/env/mpicc.bash.in psmpi-5.2.0-1/mpich2/src/env/mpicc.bash.in ---- psmpi-5.2.0-1.old/mpich2/src/env/mpicc.bash.in 2017-03-08 20:47:13.159276458 +0100 -+++ psmpi-5.2.0-1/mpich2/src/env/mpicc.bash.in 2017-03-08 20:47:48.430966270 +0100 -@@ -1,4 +1,4 @@ --#! @BASH_SHELL@ -+#!/usr/bin/env bash - # - # (C) 2006 by Argonne National Laboratory. - # See COPYRIGHT in top-level directory. -diff -ruN psmpi-5.2.0-1.old/mpich2/src/env/mpicxx.bash.in psmpi-5.2.0-1/mpich2/src/env/mpicxx.bash.in ---- psmpi-5.2.0-1.old/mpich2/src/env/mpicxx.bash.in 2017-03-08 20:47:13.160276506 +0100 -+++ psmpi-5.2.0-1/mpich2/src/env/mpicxx.bash.in 2017-03-08 20:47:51.549115658 +0100 -@@ -1,4 +1,4 @@ --#! @BASH_SHELL@ -+#!/usr/bin/env bash - # - # (C) 2006 by Argonne National Laboratory. - # See COPYRIGHT in top-level directory. -diff -ruN psmpi-5.2.0-1.old/mpich2/src/env/mpif77.bash.in psmpi-5.2.0-1/mpich2/src/env/mpif77.bash.in ---- psmpi-5.2.0-1.old/mpich2/src/env/mpif77.bash.in 2017-03-08 20:47:13.160276506 +0100 -+++ psmpi-5.2.0-1/mpich2/src/env/mpif77.bash.in 2017-03-08 20:47:55.148288103 +0100 -@@ -1,4 +1,4 @@ --#! @BASH_SHELL@ -+#!/usr/bin/env bash - # - # (C) 2006 by Argonne National Laboratory. - # See COPYRIGHT in top-level directory. -diff -ruN psmpi-5.2.0-1.old/mpich2/src/env/mpifort.bash.in psmpi-5.2.0-1/mpich2/src/env/mpifort.bash.in ---- psmpi-5.2.0-1.old/mpich2/src/env/mpifort.bash.in 2017-03-08 20:47:13.160276506 +0100 -+++ psmpi-5.2.0-1/mpich2/src/env/mpifort.bash.in 2017-03-08 20:48:08.913947609 +0100 -@@ -1,4 +1,4 @@ --#! @BASH_SHELL@ -+#!/usr/bin/env bash - # - # (C) 2006 by Argonne National Laboratory. - # See COPYRIGHT in top-level directory. 
diff --git a/Golden_Repo/p/pybind11/pybind11-2.5.0-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/pybind11/pybind11-2.5.0-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 54c388aa360298d2212bf470a40ca6a91b293c59..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pybind11/pybind11-2.5.0-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'pybind11' -version = '2.5.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pybind11.readthedocs.io' -description = """pybind11 is a lightweight header-only library that exposes C++ types in Python and vice versa, - mainly to create Python bindings of existing C++ code.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/pybind/pybind11/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Eigen', '3.3.7'), - ('binutils', '2.36.1'), -] -dependencies = [('Python', '3.8.5')] - -configopts = "-DPYTHON_EXECUTABLE=$EBROOTPYTHON/bin/python" - -sanity_check_paths = { - 'files': [], - 'dirs': ['include/pybind11', 'share/cmake/pybind11', 'lib'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/p/pybind11/pybind11-2.5.0-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/pybind11/pybind11-2.5.0-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index b42f9cc7ca7a59513ca0db111eea2e582bfe9abd..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pybind11/pybind11-2.5.0-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'pybind11' -version = '2.5.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pybind11.readthedocs.io' -description = """pybind11 is a lightweight header-only library that exposes C++ types in Python and vice versa, - mainly to create Python bindings of existing C++ code.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/pybind/pybind11/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('Eigen', '3.3.7'), - ('binutils', '2.34'), -] -dependencies = [('Python', '3.8.5')] - -configopts = "-DPYTHON_EXECUTABLE=$EBROOTPYTHON/bin/python" - -sanity_check_paths = { - 'files': [], - 'dirs': ['include/pybind11', 'share/cmake/pybind11', 'lib'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/p/pyproj/pyproj-2.6.1.post1-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/p/pyproj/pyproj-2.6.1.post1-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index b3a019d574ecb6e8491083576a2833ed9c01e4e1..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pyproj/pyproj-2.6.1.post1-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'pyproj' -version = '2.6.1.post1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pyproj4.github.io/pyproj' -description = "Python interface to PROJ4 library for cartographic transformations" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['4f5b02b4abbd41610397c635b275a8ee4a2b5bc72a75572b98ac6ae7befa471e'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PROJ', '7.1.0'), -] - -download_dep_fail = True -use_pip = True - -preinstallopts = "export PROJ_DIR=$EBROOTPROJ && " - -sanity_pip_check = True - -moduleclass = 'data' diff --git 
a/Golden_Repo/p/pyproj/pyproj-2.6.1.post1-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/p/pyproj/pyproj-2.6.1.post1-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 0978d8a944fd9891ec32772198f69e59b9c05a54..0000000000000000000000000000000000000000 --- a/Golden_Repo/p/pyproj/pyproj-2.6.1.post1-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'pyproj' -version = '2.6.1.post1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pyproj4.github.io/pyproj' -description = "Python interface to PROJ4 library for cartographic transformations" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['4f5b02b4abbd41610397c635b275a8ee4a2b5bc72a75572b98ac6ae7befa471e'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PROJ', '7.1.0'), -] - -download_dep_fail = True -use_pip = True - -preinstallopts = "export PROJ_DIR=$EBROOTPROJ && " - -sanity_pip_check = True - -moduleclass = 'data' diff --git a/Golden_Repo/q/Qhull/Qhull-2020.1-GCCcore-10.3.0.eb b/Golden_Repo/q/Qhull/Qhull-2020.1-GCCcore-10.3.0.eb deleted file mode 100644 index 138aae5250b070f80b1523bdff07bc8e7423e974..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qhull/Qhull-2020.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Qhull' -version = '2020.1' - -homepage = 'http://www.qhull.org' -description = """ -Qhull computes the convex hull, Delaunay triangulation, Voronoi diagram, halfspace intersection about a point, -furthest-site Delaunay triangulation, and furthest-site Voronoi diagram. The source code runs in 2-d, 3-d, 4-d, -and higher dimensions. Qhull implements the Quickhull algorithm for computing the convex hull. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -# Recently changed the Unix version numbering see http://www.qhull.org/src/Changes.txt -# and https://github.com/Homebrew/homebrew-science/issues/3353 -sources = ['%(namelower)s-%(version_major)s-src-8.0.0.tgz'] -source_urls = ['http://www.qhull.org/download/'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - - -sanity_check_paths = { - 'files': ['bin/qhull', 'lib/libqhull.%s' % SHLIB_EXT, 'lib/pkgconfig/qhull.pc'], - 'dirs': [], -} - -modextrapaths = { - 'CPATH': ['qhull/include'], -} - -parallel = 1 - -moduleclass = 'math' diff --git a/Golden_Repo/q/Qhull/Qhull-2020.1-GCCcore-9.3.0.eb b/Golden_Repo/q/Qhull/Qhull-2020.1-GCCcore-9.3.0.eb deleted file mode 100644 index 09d0d58443905b2770901c4495d728d2892d2da9..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qhull/Qhull-2020.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Qhull' -version = '2020.1' - -homepage = 'http://www.qhull.org' -description = """ -Qhull computes the convex hull, Delaunay triangulation, Voronoi diagram, halfspace intersection about a point, -furthest-site Delaunay triangulation, and furthest-site Voronoi diagram. The source code runs in 2-d, 3-d, 4-d, -and higher dimensions. Qhull implements the Quickhull algorithm for computing the convex hull. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Recently changed the Unix version numbering see http://www.qhull.org/src/Changes.txt -# and https://github.com/Homebrew/homebrew-science/issues/3353 -sources = ['%(namelower)s-%(version_major)s-src-8.0.0.tgz'] -source_urls = ['http://www.qhull.org/download/'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - - -sanity_check_paths = { - 'files': ['bin/qhull', 'lib/libqhull.%s' % SHLIB_EXT, 'lib/pkgconfig/qhull.pc'], - 'dirs': [], -} - -modextrapaths = { - 'CPATH': ['qhull/include'], -} - -parallel = 1 - -moduleclass = 'math' diff --git a/Golden_Repo/q/Qiskit/Qiskit-0.23.5-gpsmkl-2020-Python-3.8.5.eb b/Golden_Repo/q/Qiskit/Qiskit-0.23.5-gpsmkl-2020-Python-3.8.5.eb deleted file mode 100644 index 27e3663f6bf5236803eadd472e6608f42026d9b6..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qiskit/Qiskit-0.23.5-gpsmkl-2020-Python-3.8.5.eb +++ /dev/null @@ -1,158 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'Qiskit' -version = '0.23.5' -versionsuffix = '-Python-%(pyver)s' -local_aerver = '0.7.4' # version of qiskit-aer - -homepage = 'https://qiskit.org' -description = """Qiskit is an open-source framework for working with noisy quantum computers - at the level of pulses, circuits, and algorithms.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), - ('Rust', '1.47.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('scikit', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('CVXOPT', '1.2.5', versionsuffix), - ('h5py', '2.10.0', '-serial' + versionsuffix), - ('PySCF', '1.7.1', versionsuffix), - ('pybind11', '2.5.0', versionsuffix), # for qiskit-aer - ('nlohmann-json', '3.9.1'), # for qiskit-aer - ('spdlog', '1.8.2'), # for qiskit-aer - ('muparserx', '4.0.8'), # for qiskit-aer -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# qiskit-aer must not use CONAN to install dependencies -modextravars = {'DISABLE_CONAN': 'YES'} - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': False, # DISABLED: because 'pip check' does not find pyscf (not installed with pip) - 'download_dep_fail': True, - 'use_pip_for_deps': False, -} - -exts_list = [ - ('python_constraint', '1.4.0', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'python-constraint-%(version)s.tar.bz2'), - ('checksums', [('sha256', '501d6f17afe0032dfc6ea6c0f8acc12e44f992733f00e8538961031ef27ccb8e')]), - ('modulename', 'constraint'), - ])), - ('dill', '0.3.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '42d8ef819367516592a825746a18073ced42ca169ab1f5f4044134703e7a049c')]), - ])), - ('arrow', '0.15.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '5390e464e2c5f76971b60ffa7ee29c598c7501a294bc9f5e6dadcb251a5d027b')]), - ])), - ('marshmallow', '2.20.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a339159e422a055269f5625df51fbdc7fb20512cfffa08451cd5727783ddca39')]), - ])), - ('marshmallow-polyfield', '3.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '72980cb9a43a7c750580b4b08e9d01a8cbd583e1f59360f1924a1ed60f065a4c')]), - ])), - 
('pylatexenc', '1.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ef2d5260c38e2cb4d2829e8b918914a558557820d4f57cb6588a81e827de2bb3')]), - ])), - ('qiskit-terra', '0.16.4', dict(list(local_common_opts.items()) + [ - ('patches', ['qiskit-terra-0.9.0_fix-qiskit-version-env.patch']), - ('checksums', [( - # qiskit-terra-0.16.4.tar.gz - ('sha256', '2322e7e8d5a208c649432304f494bf73c4a4ce2f852128125fbaf9dbed1c1538'), - # qiskit-terra-0.9.0_fix-qiskit-version-env.patch - ('sha256', 'a297f338a97f956513d849286087bab5508301e214054d07482efc90a5ab1b74'), - )]), - ('modulename', 'qiskit.qobj'), - ])), - # retworkx installs `setuptools-rust`, if we do not - ('semantic_version', '2.8.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd2cb2de0558762934679b9a104e82eca7af448c9f4974d1f3eeccff651df8a54')]), - ])), - ('setuptools-rust', '0.11.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a5b5954909cbc5d66b914ee6763f81fa2610916041c7266105a469f504a7c4ca')]), - ])), - ('retworkx', '0.7.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3fcd08031b743766935e01696c01545a24d1ef16c854333ba835c96a66eb76a9')]), - ])), - ('fastjsonschema', '2.14.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'afbc235655f06356e46caa80190512e4d9222abfaca856041be5a74c665fa094')]), - ])), - ('qiskit-ignis', '0.5.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ca0eca5ad265388b1cd8914ceb31c50a6101f4b2de634553dcb2b5c9e122c840')]), - ('modulename', 'qiskit.ignis'), - ])), - ('fastdtw', '0.3.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2350fa6ec36bcad186eaf81f46eff35181baf04e324f522de8aeb43d0243f64f')]), - ])), - ('dlx', '1.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ef75bc9d590216ebde7d4811f9ae6b2d6c6dc2a54772d94ae13384dc517a5aae')]), - ])), - ('docloud', '1.0.375', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '996d55407498fd01e6c6c480f367048f92255e9ca9db0e9ea19aaef91328a441')]), - ])), - ('docplex', '2.10.154', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '34a36ff25dd44b72cfe76e01dd0658eb7a451b0c3a52ef9f98e8e38fed83d537')]), - ])), - ('joblib', '0.14.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0630eea4f5664c463f23fbf5dcfc54a2bc6168902719fa8e19daf033022786c8')]), - ])), - ('inflection', '0.3.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '18ea7fb7a7d152853386523def08736aa8c32636b047ade55f7578c4edeb16ca')]), - ])), - ('Quandl', '3.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c3a9fc5ec1b585eeacd97531454e7795dde7a072c057f21335e5918cb905fc7e')]), - ])), - ('qiskit-aqua', '0.8.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('bea761485826bb3858b3b446571b944dd4c6689f81f16ad96ff8a317b35613c6')]), - ('modulename', 'qiskit.aqua'), - ])), - ('websockets', '7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '08e3c3e0535befa4f0c4443824496c03ecc25062debbcf895874f8a0b4c97c9f')]), - ])), - ('ntlm-auth', '1.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '350f2389c8ee5517f47db55a36ac2f8efc9742a60a678d6e2caa92385bdcaa9a')]), - ])), - ('requests_ntlm', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9189c92e8c61ae91402a64b972c4802b2457ce6a799d658256ebf084d5c7eb71')]), - ])), - ('nest_asyncio', '1.4.3', 
dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa')]), - ])), - ('qiskit-ibmq-provider', '0.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7e8b93cf12963c46989fac6d1540346a9fb1bc8f5768b2d64e8b1c6714a9c0de')]), - ('modulename', 'qiskit.providers.ibmq'), - ])), - ('qiskit-aer', local_aerver, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0fec1fa3d528cc8e9de13713cb243fe40a98c60dc27111b479f1df707d9c03a4')]), - ('modulename', 'qiskit.providers.aer'), - ])), - # (possible addition in future) qiskit-aer-gpu == qiskit-aer+CUDA - ('qiskit', version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6529ef7738c85341fb1008c33ca84111a6bc1de3c9072aae36206848550d0091')]), - ])), -] - -postinstallcmds = [ - # qiskit/providers/aer/VERSION.txt does not get installed, but is required by qiskit/providers/aer/version.py - "echo %s > %%(installdir)s/lib/python%%(pyshortver)s/site-packages/qiskit/providers/aer/VERSION.txt" % local_aerver, -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/q/Qiskit/Qiskit-0.23.5-gpsmkl-2021-Python-3.8.5.eb b/Golden_Repo/q/Qiskit/Qiskit-0.23.5-gpsmkl-2021-Python-3.8.5.eb deleted file mode 100644 index b8d53222ce2b723712cb18f747e048c585f2ceff..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qiskit/Qiskit-0.23.5-gpsmkl-2021-Python-3.8.5.eb +++ /dev/null @@ -1,158 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'Qiskit' -version = '0.23.5' -versionsuffix = '-Python-%(pyver)s' -local_aerver = '0.7.4' # version of qiskit-aer - -homepage = 'https://qiskit.org' -description = """Qiskit is an open-source framework for working with noisy quantum computers - at the level of pulses, circuits, and algorithms.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('Ninja', '1.10.0'), - ('pkg-config', '0.29.2'), - ('Rust', '1.47.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('scikit', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('CVXOPT', '1.2.5', versionsuffix), - ('h5py', '2.10.0', '-serial' + versionsuffix), - ('PySCF', '1.7.1', versionsuffix), - ('pybind11', '2.5.0', versionsuffix), # for qiskit-aer - ('nlohmann-json', '3.9.1'), # for qiskit-aer - ('spdlog', '1.8.2'), # for qiskit-aer - ('muparserx', '4.0.8'), # for qiskit-aer -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# qiskit-aer must not use CONAN to install dependencies -modextravars = {'DISABLE_CONAN': 'YES'} - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'use_pip': True, - 'sanity_pip_check': False, # DISABLED: because 'pip check' does not find pyscf (not installed with pip) - 'download_dep_fail': True, - 'use_pip_for_deps': False, -} - -exts_list = [ - ('python_constraint', '1.4.0', dict(list(local_common_opts.items()) + [ - ('source_tmpl', 'python-constraint-%(version)s.tar.bz2'), - ('checksums', [('sha256', '501d6f17afe0032dfc6ea6c0f8acc12e44f992733f00e8538961031ef27ccb8e')]), - ('modulename', 'constraint'), - ])), - ('dill', '0.3.1.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '42d8ef819367516592a825746a18073ced42ca169ab1f5f4044134703e7a049c')]), - ])), - ('arrow', '0.15.5', dict(list(local_common_opts.items()) + [ - 
('checksums', [('sha256', '5390e464e2c5f76971b60ffa7ee29c598c7501a294bc9f5e6dadcb251a5d027b')]), - ])), - ('marshmallow', '2.20.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a339159e422a055269f5625df51fbdc7fb20512cfffa08451cd5727783ddca39')]), - ])), - ('marshmallow-polyfield', '3.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '72980cb9a43a7c750580b4b08e9d01a8cbd583e1f59360f1924a1ed60f065a4c')]), - ])), - ('pylatexenc', '1.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ef2d5260c38e2cb4d2829e8b918914a558557820d4f57cb6588a81e827de2bb3')]), - ])), - ('qiskit-terra', '0.16.4', dict(list(local_common_opts.items()) + [ - ('patches', ['qiskit-terra-0.9.0_fix-qiskit-version-env.patch']), - ('checksums', [( - # qiskit-terra-0.16.4.tar.gz - ('sha256', '2322e7e8d5a208c649432304f494bf73c4a4ce2f852128125fbaf9dbed1c1538'), - # qiskit-terra-0.9.0_fix-qiskit-version-env.patch - ('sha256', 'a297f338a97f956513d849286087bab5508301e214054d07482efc90a5ab1b74'), - )]), - ('modulename', 'qiskit.qobj'), - ])), - # retworkx installs `setuptools-rust`, if we do not - ('semantic_version', '2.8.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'd2cb2de0558762934679b9a104e82eca7af448c9f4974d1f3eeccff651df8a54')]), - ])), - ('setuptools-rust', '0.11.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'a5b5954909cbc5d66b914ee6763f81fa2610916041c7266105a469f504a7c4ca')]), - ])), - ('retworkx', '0.7.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '3fcd08031b743766935e01696c01545a24d1ef16c854333ba835c96a66eb76a9')]), - ])), - ('fastjsonschema', '2.14.5', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'afbc235655f06356e46caa80190512e4d9222abfaca856041be5a74c665fa094')]), - ])), - ('qiskit-ignis', '0.5.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ca0eca5ad265388b1cd8914ceb31c50a6101f4b2de634553dcb2b5c9e122c840')]), - ('modulename', 'qiskit.ignis'), - ])), - ('fastdtw', '0.3.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '2350fa6ec36bcad186eaf81f46eff35181baf04e324f522de8aeb43d0243f64f')]), - ])), - ('dlx', '1.0.4', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'ef75bc9d590216ebde7d4811f9ae6b2d6c6dc2a54772d94ae13384dc517a5aae')]), - ])), - ('docloud', '1.0.375', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '996d55407498fd01e6c6c480f367048f92255e9ca9db0e9ea19aaef91328a441')]), - ])), - ('docplex', '2.10.154', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '34a36ff25dd44b72cfe76e01dd0658eb7a451b0c3a52ef9f98e8e38fed83d537')]), - ])), - ('joblib', '0.14.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0630eea4f5664c463f23fbf5dcfc54a2bc6168902719fa8e19daf033022786c8')]), - ])), - ('inflection', '0.3.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '18ea7fb7a7d152853386523def08736aa8c32636b047ade55f7578c4edeb16ca')]), - ])), - ('Quandl', '3.5.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'c3a9fc5ec1b585eeacd97531454e7795dde7a072c057f21335e5918cb905fc7e')]), - ])), - ('qiskit-aqua', '0.8.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('bea761485826bb3858b3b446571b944dd4c6689f81f16ad96ff8a317b35613c6')]), - ('modulename', 'qiskit.aqua'), - ])), - ('websockets', '7.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 
'08e3c3e0535befa4f0c4443824496c03ecc25062debbcf895874f8a0b4c97c9f')]), - ])), - ('ntlm-auth', '1.4.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '350f2389c8ee5517f47db55a36ac2f8efc9742a60a678d6e2caa92385bdcaa9a')]), - ])), - ('requests_ntlm', '1.1.0', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9189c92e8c61ae91402a64b972c4802b2457ce6a799d658256ebf084d5c7eb71')]), - ])), - ('nest_asyncio', '1.4.3', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', 'eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa')]), - ])), - ('qiskit-ibmq-provider', '0.11.1', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '7e8b93cf12963c46989fac6d1540346a9fb1bc8f5768b2d64e8b1c6714a9c0de')]), - ('modulename', 'qiskit.providers.ibmq'), - ])), - ('qiskit-aer', local_aerver, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '0fec1fa3d528cc8e9de13713cb243fe40a98c60dc27111b479f1df707d9c03a4')]), - ('modulename', 'qiskit.providers.aer'), - ])), - # (possible addition in future) qiskit-aer-gpu == qiskit-aer+CUDA - ('qiskit', version, dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '6529ef7738c85341fb1008c33ca84111a6bc1de3c9072aae36206848550d0091')]), - ])), -] - -postinstallcmds = [ - # qiskit/providers/aer/VERSION.txt does not get installed, but is required by qiskit/providers/aer/version.py - "echo %s > %%(installdir)s/lib/python%%(pyshortver)s/site-packages/qiskit/providers/aer/VERSION.txt" % local_aerver, -] - -moduleclass = 'quantum' diff --git a/Golden_Repo/q/Qiskit/qiskit-terra-0.9.0_fix-qiskit-version-env.patch b/Golden_Repo/q/Qiskit/qiskit-terra-0.9.0_fix-qiskit-version-env.patch deleted file mode 100644 index 677563a70f8ed223cd9a5cef16c9ab3239d66395..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qiskit/qiskit-terra-0.9.0_fix-qiskit-version-env.patch +++ /dev/null @@ -1,13 +0,0 @@ -also include $LD_LIBRARY_PATH in environment where "python -m pip freeze" is run on 'import qiskit' -author: Kenneth Hoste (HPC-UGent) ---- qiskit-terra-0.9.0.orig/qiskit/version.py 2019-08-22 15:34:00.000000000 +0200 -+++ qiskit-terra-0.9.0/qiskit/version.py 2019-09-15 18:27:39.760817000 +0200 -@@ -28,7 +28,7 @@ - def _minimal_ext_cmd(cmd): - # construct minimal environment - env = {} -- for k in ['SYSTEMROOT', 'PATH']: -+ for k in ['SYSTEMROOT', 'PATH', 'LD_LIBRARY_PATH']: - v = os.environ.get(k) - if v is not None: - env[k] = v diff --git a/Golden_Repo/q/Qt5/Qt5-5.13.1_fix-avx2.patch b/Golden_Repo/q/Qt5/Qt5-5.13.1_fix-avx2.patch deleted file mode 100644 index a2c9a0cdf28969b499aaf1022e017a49152021c9..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.13.1_fix-avx2.patch +++ /dev/null @@ -1,14 +0,0 @@ -build qdrawhelper_avx2.cpp with -mavx2 rather than -march=core-avx2 to avoid compilation failures on non-AVX2 systems -cfr. 
https://bugreports.qt.io/browse/QTBUG-71564 (and https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69471) -author: Kenneth Hoste (HPC-UGent) ---- qt-everywhere-src-5.12.3.orig/qtbase/mkspecs/common/gcc-base.conf 2019-04-09 11:51:26.000000000 +0200 -+++ qt-everywhere-src-5.12.3/qtbase/mkspecs/common/gcc-base.conf 2019-05-01 19:21:50.683489624 +0200 -@@ -109,7 +109,7 @@ - QMAKE_CFLAGS_MIPS_DSPR2 += -mdspr2 - - # -march=haswell is supported as of GCC 4.9 and Clang 3.6 --QMAKE_CFLAGS_ARCH_HASWELL = -march=core-avx2 -+QMAKE_CFLAGS_ARCH_HASWELL = -mavx2 - - # Wrapper tools that understand .o/.a files with GIMPLE instead of machine code - QMAKE_AR_LTCG = gcc-ar cqs diff --git a/Golden_Repo/q/Qt5/Qt5-5.13.1_fix-qmake-libdir.patch b/Golden_Repo/q/Qt5/Qt5-5.13.1_fix-qmake-libdir.patch deleted file mode 100644 index cd12eeb29e1d7dd322b94d03e10e8418377f7390..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.13.1_fix-qmake-libdir.patch +++ /dev/null @@ -1,44 +0,0 @@ -replaces hardcoded paths to xlib and opengl by EB paths in qmake.conf -author: Alex Domingo, originally written for Qt-5.12.3, see pr #8544 -diff -Nru qtbase/mkspecs/linux-g++-64/qmake.conf qtbase/mkspecs/linux-g++-64/qmake.conf ---- qtbase/mkspecs/linux-g++-64/qmake.conf 2019-04-09 11:51:26.000000000 +0200 -+++ qtbase/mkspecs/linux-g++-64/qmake.conf 2019-06-04 00:26:20.921468000 +0200 -@@ -18,7 +18,7 @@ - include(../common/g++-unix.conf) - - --QMAKE_LIBDIR_X11 = /usr/X11R6/lib64 --QMAKE_LIBDIR_OPENGL = /usr/X11R6/lib64 -+QMAKE_LIBDIR_X11 = $$(EBROOTX11)/lib -+QMAKE_LIBDIR_OPENGL = $$(EBROOTLIBGLU)/lib - - load(qt_config) -diff -Nru qtbase/mkspecs/linux-icc-64/qmake.conf qtbase/mkspecs/linux-icc-64/qmake.conf ---- qtbase/mkspecs/linux-icc-64/qmake.conf 2019-04-09 11:51:26.000000000 +0200 -+++ qtbase/mkspecs/linux-icc-64/qmake.conf 2019-06-04 00:28:29.070834000 +0200 -@@ -12,5 +12,5 @@ - - # Change the all LIBDIR variables to use lib64 instead of lib - --QMAKE_LIBDIR_X11 = /usr/X11R6/lib64 --QMAKE_LIBDIR_OPENGL = /usr/X11R6/lib64 -+QMAKE_LIBDIR_X11 = $$(EBROOTX11)/lib -+QMAKE_LIBDIR_OPENGL = $$(EBROOTLIBGLU)/lib -diff -Nru qtbase/mkspecs/openbsd-g++/qmake.conf qtbase/mkspecs/openbsd-g++/qmake.conf ---- qtbase/mkspecs/openbsd-g++/qmake.conf 2019-04-09 11:51:26.000000000 +0200 -+++ qtbase/mkspecs/openbsd-g++/qmake.conf 2019-06-04 00:31:07.877995000 +0200 -@@ -12,10 +12,10 @@ - QMAKE_LIBDIR_POST = /usr/local/lib - - # System provided X11 defaults to X11R6 path on OpenBSD --QMAKE_INCDIR_X11 = /usr/X11R6/include --QMAKE_LIBDIR_X11 = /usr/X11R6/lib --QMAKE_INCDIR_OPENGL = /usr/X11R6/include --QMAKE_LIBDIR_OPENGL = /usr/X11R6/lib -+QMAKE_INCDIR_X11 = $$(EBROOTX11)/include -+QMAKE_LIBDIR_X11 = $$(EBROOTX11)/lib -+QMAKE_INCDIR_OPENGL = $$(EBROOTLIBGLU)/include -+QMAKE_LIBDIR_OPENGL = $$(EBROOTLIBGLU)/lib - - QMAKE_RPATHDIR += $$QMAKE_LIBDIR_X11 - diff --git a/Golden_Repo/q/Qt5/Qt5-5.14.1_fix-jaspermath.patch b/Golden_Repo/q/Qt5/Qt5-5.14.1_fix-jaspermath.patch deleted file mode 100644 index 92480059a5c74982dd6dd3a09eceea54e7db8f97..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.14.1_fix-jaspermath.patch +++ /dev/null @@ -1,13 +0,0 @@ -add math.h because jasper.h does not provide it any more in 2.0.19 -see commit https://github.com/qt/qtimageformats/commit/704868db61be1542c2d9e2b75ead00c45c56cc36 -diff -Nru qtimageformats/src/plugins/imageformats/jp2/qjp2handler.cpp qtimageformats/src/plugins/imageformats/jp2/qjp2handler.cpp ---- 
qt-everywhere-src-5.14.1.orig/qtimageformats/src/plugins/imageformats/jp2/qjp2handler.cpp 2020-09-02 17:38:18.915006000 +0200 -+++ qt-everywhere-src-5.14.1/qtimageformats/src/plugins/imageformats/jp2/qjp2handler.cpp 2020-09-02 17:38:34.375018000 +0200 -@@ -45,6 +45,7 @@ - #include "qcolor.h" - - #include <jasper/jasper.h> -+#include <math.h> // for pow - - QT_BEGIN_NAMESPACE - diff --git a/Golden_Repo/q/Qt5/Qt5-5.14.2-GCCcore-10.3.0.eb b/Golden_Repo/q/Qt5/Qt5-5.14.2-GCCcore-10.3.0.eb deleted file mode 100644 index 9fc8c03de367227f2489d025147a58568b58cd92..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.14.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,92 +0,0 @@ -# Note: Qt5 needs python to build. In CentOS 8 that fails, since there is no python executable. Adding 'Python' as -# dependencies does not work either, creates conflicts and ninja fails. The most convenient alternative is creating -# a symlink /usr/bin/python2 -> /path/to/python, where /path/to is in the user environment. Not elegant, but quickest -# time to solution. Best way would be to integrate that into the easyblock - -easyblock = 'EB_Qt' - -name = 'Qt5' -version = '5.14.2' - -homepage = 'https://qt.io/' -description = "Qt is a comprehensive cross-platform C++ application framework." - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -# disabling use of -ftree-vectorize is required to avoid compilation failures on some systems (e.g. Intel Skylake X) -toolchainopts = {'vectorize': False} - -source_urls = [ - 'https://download.qt.io/official_releases/qt/%(version_major_minor)s/%(version)s/single/', - 'https://download.qt.io/archive/qt/%(version_major_minor)s/%(version)s/single/' -] -sources = ['qt-everywhere-src-%(version)s.tar.xz'] -patches = [ - 'Qt5-5.13.1_fix-avx2.patch', - 'Qt5-5.13.1_fix-qmake-libdir.patch', - 'Qt5-5.14.1_fix-jaspermath.patch', - 'Qt5-5.14.1_fix-OF-Gentoo.patch', - 'Qt5-5.14.2_fix-missing-stdint.patch', - 'Qt5-5.14.2_fix-webengine-bison.patch', - 'Qt5-5.14.2_fix-std-runtime_error.patch', - # 'Qt5-5.14.2_fix-include-math.patch', -] -checksums = [ - # qt-everywhere-src-5.14.2.tar.xz - 'c6fcd53c744df89e7d3223c02838a33309bd1c291fcb6f9341505fe99f7f19fa', - # Qt5-5.13.1_fix-avx2.patch - '6f46005f056bf9e6ff3e5d012a874d18ee03b33e685941f2979c970be91a9dbc', - # Qt5-5.13.1_fix-qmake-libdir.patch - '511ca9c0599ceb1989f73d8ceea9199c041512d3a26ee8c5fd870ead2c10cb63', - # Qt5-5.14.1_fix-jaspermath.patch - '23f84af8d370092ff521e834b135f1540219e3549da2f940e363a7692df6a13c', -] - -osdependencies = [('python2')] # required to build QtWebEngine - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), - # deps for QtWebEngine - ('Bison', '3.7.6'), - ('flex', '2.6.4'), - ('gperf', '3.1'), - ('Ninja', '1.10.0'), - ('re2c', '1.3') -] - -dependencies = [ - ('double-conversion', '3.1.5'), - ('GLib', '2.64.4'), - ('PCRE2', '10.34'), - ('libpng', '1.6.37'), - # deps for QtWebEngine - ('X11', '20200222'), - ('fontconfig', '2.13.92'), - ('DBus', '1.12.20'), - ('libevent', '2.1.12'), - ('OpenGL', '2020'), - ('libjpeg-turbo', '2.0.5'), - ('NSS', '3.51'), - ('snappy', '1.1.8'), - ('JasPer', '2.0.19'), - ('bzip2', '1.0.8'), - ('ICU', '67.1'), -] - -modextravars = { - 'QT_XKB_CONFIG_ROOT': '$EBROOTX11/share/X11/xkb' -} - -# qtgamepad needs recent kernel/libevdev (fails on RHEL 6.x) -# qtwayland fails to build on (some) Centos 7 systems -configopts = '-skip qtgamepad -skip qtwayland' - -# ensure `python` is found to build QtQml -prebuildopts = 'ln -s 
/usr/bin/python2 %(builddir)s/python && export PATH=%(builddir)s:$PATH && ' - -# make sure QtWebEngine component is being built & installed -check_qtwebengine = True - -moduleclass = 'devel' diff --git a/Golden_Repo/q/Qt5/Qt5-5.14.2-GCCcore-9.3.0.eb b/Golden_Repo/q/Qt5/Qt5-5.14.2-GCCcore-9.3.0.eb deleted file mode 100644 index facf5b7e6b851db774c33f584a43db8860be52a4..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.14.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,83 +0,0 @@ -# Note: Qt5 needs python to build. In CentOS 8 that fails, since there is no python executable. Adding 'Python' as -# dependencies does not work either, creates conflicts and ninja fails. The most convenient alternative is creating -# a symlink /usr/bin/python2 -> /path/to/python, where /path/to is in the user environment. Not elegant, but quickest -# time to solution. Best way would be to integrate that into the easyblock - -easyblock = 'EB_Qt' - -name = 'Qt5' -version = '5.14.2' - -homepage = 'https://qt.io/' -description = "Qt is a comprehensive cross-platform C++ application framework." - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -# disabling use of -ftree-vectorize is required to avoid compilation failures on some systems (e.g. Intel Skylake X) -toolchainopts = {'vectorize': False} - -source_urls = [ - 'https://download.qt.io/official_releases/qt/%(version_major_minor)s/%(version)s/single/', - 'https://download.qt.io/archive/qt/%(version_major_minor)s/%(version)s/single/' -] -sources = ['qt-everywhere-src-%(version)s.tar.xz'] -patches = [ - 'Qt5-5.13.1_fix-avx2.patch', - 'Qt5-5.13.1_fix-qmake-libdir.patch', - 'Qt5-5.14.1_fix-jaspermath.patch', -] -checksums = [ - 'c6fcd53c744df89e7d3223c02838a33309bd1c291fcb6f9341505fe99f7f19fa', # qt-everywhere-src-5.14.2.tar.xz - '6f46005f056bf9e6ff3e5d012a874d18ee03b33e685941f2979c970be91a9dbc', # Qt5-5.13.1_fix-avx2.patch - '511ca9c0599ceb1989f73d8ceea9199c041512d3a26ee8c5fd870ead2c10cb63', # Qt5-5.13.1_fix-qmake-libdir.patch - '23f84af8d370092ff521e834b135f1540219e3549da2f940e363a7692df6a13c', # Qt5-5.14.1_fix-jaspermath.patch -] - -osdependencies = [('python2')] # required to build QtWebEngine - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), - # deps for QtWebEngine - ('Bison', '3.6.4'), - ('flex', '2.6.4'), - ('gperf', '3.1'), - ('Ninja', '1.10.0'), - ('re2c', '1.3') -] - -dependencies = [ - ('double-conversion', '3.1.5'), - ('GLib', '2.64.4'), - ('PCRE2', '10.34'), - ('libpng', '1.6.37'), - # deps for QtWebEngine - ('X11', '20200222'), - ('fontconfig', '2.13.92'), - ('DBus', '1.12.20'), - ('libevent', '2.1.12'), - ('OpenGL', '2020'), - ('libjpeg-turbo', '2.0.5'), - ('NSS', '3.51'), - ('snappy', '1.1.8'), - ('JasPer', '2.0.19'), - ('bzip2', '1.0.8'), - ('ICU', '67.1'), -] - -modextravars = { - 'QT_XKB_CONFIG_ROOT': '$EBROOTX11/share/X11/xkb' -} - -# qtgamepad needs recent kernel/libevdev (fails on RHEL 6.x) -# qtwayland fails to build on (some) Centos 7 systems -configopts = '-skip qtgamepad -skip qtwayland' - -# ensure `python` is found to build QtQml -prebuildopts = 'ln -s /usr/bin/python2 %(builddir)s/python && export PATH=%(builddir)s:$PATH && ' - -# make sure QtWebEngine component is being built & installed -check_qtwebengine = True - -moduleclass = 'devel' diff --git a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-include-math.patch b/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-include-math.patch deleted file mode 100644 index 
57d817537433740c4159d249842183c29e9005ad..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-include-math.patch +++ /dev/null @@ -1,13 +0,0 @@ -add missing include statement to fix compilation error for qjp2handler.cpp with recent JasPer: -"error: pow was not declared in this scope" -see also https://github.com/qt/qtimageformats/commit/704868db61be1542c2d9e2b75ead00c45c56cc36 ---- qt-everywhere-src-5.14.2/qtimageformats/src/plugins/imageformats/jp2/qjp2handler.cpp.orig 2021-02-25 13:10:44.890921650 +0100 -+++ qt-everywhere-src-5.14.2/qtimageformats/src/plugins/imageformats/jp2/qjp2handler.cpp 2021-02-25 13:11:04.310799662 +0100 -@@ -45,6 +45,7 @@ - #include "qcolor.h" - - #include <jasper/jasper.h> -+#include <math.h> // for pow - - QT_BEGIN_NAMESPACE - diff --git a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-missing-stdint.patch b/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-missing-stdint.patch deleted file mode 100644 index 651c20a2b44eefb32ddbea7ba769d759df116f44..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-missing-stdint.patch +++ /dev/null @@ -1,70 +0,0 @@ -# Missing include leads to compilation error. Seemingly only affects 5.14.2 -# Ref: https://bugreports.qt.io/browse/QTBUG-83225 -# Mikael Öhman <micketeer@gmail.com> ---- qtlocation/src/3rdparty/mapbox-gl-native/src/mbgl/util/convert.cpp.orig 2020-11-21 03:17:06.465885355 +0100 -+++ qtlocation/src/3rdparty/mapbox-gl-native/src/mbgl/util/convert.cpp 2020-11-21 03:16:51.225862093 +0100 -@@ -1,3 +1,4 @@ -+#include <stdint.h> - #include <mbgl/util/convert.hpp> - - namespace mbgl { ---- ./qtwebengine/src/3rdparty/chromium/base/task_runner.h.orig 2020-11-23 11:41:09.804800874 +0100 -+++ ./qtwebengine/src/3rdparty/chromium/base/task_runner.h 2020-11-23 11:39:55.982675225 +0100 -@@ -6,6 +6,7 @@ - #define BASE_TASK_RUNNER_H_ - - #include <stddef.h> -+#include <stdint.h> - - #include "base/base_export.h" - #include "base/callback.h" ---- ./qtwebengine/src/3rdparty/chromium/third_party/perfetto/include/perfetto/base/task_runner.h.orig 2020-11-23 19:05:25.400843511 +0100 -+++ ./qtwebengine/src/3rdparty/chromium/third_party/perfetto/include/perfetto/base/task_runner.h 2020-11-23 19:05:40.863866361 +0100 -@@ -17,6 +17,7 @@ - #ifndef INCLUDE_PERFETTO_BASE_TASK_RUNNER_H_ - #define INCLUDE_PERFETTO_BASE_TASK_RUNNER_H_ - -+#include <stdint.h> - #include <functional> - - #include "perfetto/base/export.h" ---- ./qtwebengine/src/3rdparty/chromium/third_party/webrtc/call/rtx_receive_stream.h.orig 2020-11-23 19:54:51.112219468 +0100 -+++ ./qtwebengine/src/3rdparty/chromium/third_party/webrtc/call/rtx_receive_stream.h 2020-11-23 19:55:01.389234744 +0100 -@@ -11,6 +11,7 @@ - #ifndef CALL_RTX_RECEIVE_STREAM_H_ - #define CALL_RTX_RECEIVE_STREAM_H_ - -+#include <stdint.h> - #include <map> - - #include "call/rtp_packet_sink_interface.h" ---- ./qtwebengine/src/3rdparty/chromium/third_party/webrtc/modules/audio_processing/aec3/clockdrift_detector.h.orig 2020-11-23 20:24:51.676954471 +0100 -+++ ./qtwebengine/src/3rdparty/chromium/third_party/webrtc/modules/audio_processing/aec3/clockdrift_detector.h 2020-11-23 20:25:03.439972877 +0100 -@@ -11,6 +11,7 @@ - #ifndef MODULES_AUDIO_PROCESSING_AEC3_CLOCKDRIFT_DETECTOR_H_ - #define MODULES_AUDIO_PROCESSING_AEC3_CLOCKDRIFT_DETECTOR_H_ - -+#include <cstddef> - #include <array> - - namespace webrtc { ---- ./qtwebengine/src/3rdparty/chromium/third_party/webrtc/modules/video_coding/decoding_state.h.orig 2020-11-23 20:58:34.577202208 +0100 -+++ 
./qtwebengine/src/3rdparty/chromium/third_party/webrtc/modules/video_coding/decoding_state.h 2020-11-23 20:58:22.770184640 +0100 -@@ -11,6 +11,7 @@ - #ifndef MODULES_VIDEO_CODING_DECODING_STATE_H_ - #define MODULES_VIDEO_CODING_DECODING_STATE_H_ - -+#include <stdint.h> - #include <map> - #include <set> - #include <vector> ---- ./qtwebengine/src/3rdparty/chromium/media/cdm/supported_cdm_versions.h.orig 2020-11-24 18:20:48.115156207 +0100 -+++ ./qtwebengine/src/3rdparty/chromium/media/cdm/supported_cdm_versions.h 2020-11-24 18:21:21.415054163 +0100 -@@ -5,6 +5,7 @@ - #ifndef MEDIA_CDM_SUPPORTED_CDM_VERSIONS_H_ - #define MEDIA_CDM_SUPPORTED_CDM_VERSIONS_H_ - -+#include <cstddef> - #include <array> - - #include "media/base/media_export.h" diff --git a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-std-runtime_error.patch b/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-std-runtime_error.patch deleted file mode 100644 index b2d8835173246d3458bd29d9a3c150caf2b32744..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-std-runtime_error.patch +++ /dev/null @@ -1,12 +0,0 @@ -qtlocation fix "error: 'runtime_error' is not a member of 'std'" -Patch by Simon Branford, University of Birmingham ---- qtlocation/src/3rdparty/mapbox-gl-native/platform/default/bidi.cpp.orig 2021-01-14 19:41:10.485471000 +0000 -+++ qtlocation/src/3rdparty/mapbox-gl-native/platform/default/bidi.cpp 2021-01-14 19:41:18.997592000 +0000 -@@ -5,6 +5,7 @@ - #include <unicode/ushape.h> - - #include <memory> -+#include <stdexcept> - - namespace mbgl { - diff --git a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-webengine-bison.patch b/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-webengine-bison.patch deleted file mode 100644 index d095616442b2e6e53492aca48632e38112f7b6fc..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/Qt5/Qt5-5.14.2_fix-webengine-bison.patch +++ /dev/null @@ -1,57 +0,0 @@ -Stolen from https://code.qt.io/cgit/qt/qtwebengine-chromium.git/commit/?h=80-based&id=1a53f599 -with paths adjusted for Qt 3rdparty: - -From 1a53f5995697f5ac6fd501dbdc0ee39c9488ee66 Mon Sep 17 00:00:00 2001 -From: Allan Sandfeld Jensen <allan.jensen@qt.io> -Date: Fri, 14 Aug 2020 16:38:48 +0200 -Subject: Fix bison 3.7 -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - -Do a replace run inspired by newer versions of the script. 
- -Fixes: QTBUG-86018 -Change-Id: Ib1dc771e22a662aff0fae842d135ad58fad08bc1 -Reviewed-by: Michael Brüning <michael.bruning@qt.io> ---- - .../blink/renderer/build/scripts/rule_bison.py | 19 +++++++++++++++++++ - 1 file changed, 19 insertions(+) - -diff --git a/chromium/third_party/blink/renderer/build/scripts/rule_bison.py b/chromium/third_party/blink/renderer/build/scripts/rule_bison.py -index f75e25fd23f..7e0767e951a 100755 ---- ./qtwebengine/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/rule_bison.py.orig -+++ ./qtwebengine/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/rule_bison.py -@@ -45,6 +45,19 @@ from utilities import abs - - from blinkbuild.name_style_converter import NameStyleConverter - -+def modify_file(path, prefix_lines, suffix_lines, replace_list=[]): -+ prefix_lines = map(lambda s: s + '\n', prefix_lines) -+ suffix_lines = map(lambda s: s + '\n', suffix_lines) -+ with open(path, 'r') as f: -+ old_lines = f.readlines() -+ for i in range(len(old_lines)): -+ for src, dest in replace_list: -+ old_lines[i] = old_lines[i].replace(src, dest) -+ new_lines = prefix_lines + old_lines + suffix_lines -+ with open(path, 'w') as f: -+ f.writelines(new_lines) -+ -+ - assert len(sys.argv) == 4 or len(sys.argv) == 5 - - inputFile = abs(sys.argv[1]) -@@ -115,3 +128,9 @@ print >>outputHFile, '#define %s' % headerGuard - print >>outputHFile, outputHContents - print >>outputHFile, '#endif // %s' % headerGuard - outputHFile.close() -+ -+common_replace_list = [(inputRoot + '.hh', -+ inputRoot + '.h')] -+modify_file( -+ outputCpp, [], [], -+ replace_list=common_replace_list) --- -cgit v1.2.1 - diff --git a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.6-gpsmkl-2020.eb b/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.6-gpsmkl-2020.eb deleted file mode 100644 index cbd723b35baed761e0ac47e000a519839458771e..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.6-gpsmkl-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -name = 'QuantumESPRESSO' -version = '6.6' - -homepage = 'https://www.quantum-espresso.org' -description = """Quantum ESPRESSO is an integrated suite of computer codes -for electronic-structure calculations and materials modeling at the nanoscale. -It is based on density-functional theory, plane waves, and pseudopotentials -(both norm-conserving and ultrasoft). -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': True} - -# major part of this list was determined from espresso/install/plugins_list -# - There is currently no plumed version that works for QME 6.3 -# - Yambo is better to install outside of QM-E, there are problems -# building it with openmp turned on and the intel toolchain and there is -# no added benefit to build it inside QM-E -# - Want is never installed in bin/ (since it first shows up in 4.2) so -# don't bother building it here. 
- -source_urls = [ - 'https://github.com/QEF/q-e/releases/download/qe-%(version)s/', - 'https://github.com/dceresoli/qe-gipaw/archive/', - 'https://github.com/wannier-developers/wannier90/archive/' -] -sources = [ - 'qe-%(version)s-ReleasePack.tgz', - # to be uncommented once qe-gipaw-6.6 is released - # {'filename': 'qe-gipaw-%(version)s.tar.gz', 'download_filename': '%(version)s.tar.gz'}, - {'filename': 'wannier90-3.1.0.tar.gz', 'download_filename': 'v3.1.0.tar.gz'}, -] -checksums = [ - 'de6996b9f1bf480bcd0166d24e918f5ff3c8fdb710f59f781bc9d33819280eb5', # qe-6.6-ReleasePack.tgz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', # wannier90-3.1.0.tar.gz -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('ELPA', '2020.05.001'), - ('libxc', '4.3.4'), -] - -# The third party packages should be installed separately and added as -# dependencies. The exception is w90, which is force built, and gipaw -# which depends on qe source -# gipaw is disabled in this build because it doesn't have a version compatible with 6.6 -buildopts = 'all gwl xspectra couple epw w90' # gipaw - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.6-intel-para-2020.eb b/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.6-intel-para-2020.eb deleted file mode 100644 index 7b3ae341c0c3b6888b7e88c14bf54ff5fa599faa..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.6-intel-para-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -name = 'QuantumESPRESSO' -version = '6.6' - -homepage = 'https://www.quantum-espresso.org' -description = """Quantum ESPRESSO is an integrated suite of computer codes -for electronic-structure calculations and materials modeling at the nanoscale. -It is based on density-functional theory, plane waves, and pseudopotentials -(both norm-conserving and ultrasoft). -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': True} - -# major part of this list was determined from espresso/install/plugins_list -# - There is currently no plumed version that works for QME 6.3 -# - Yambo is better to install outside of QM-E, there are problems -# building it with openmp turned on and the intel toolchain and there is -# no added benefit to build it inside QM-E -# - Want is never installed in bin/ (since it first shows up in 4.2) so -# don't bother building it here. - -source_urls = [ - 'https://github.com/QEF/q-e/releases/download/qe-%(version)s/', - 'https://github.com/dceresoli/qe-gipaw/archive/', - 'https://github.com/wannier-developers/wannier90/archive/' -] -sources = [ - 'qe-%(version)s-ReleasePack.tgz', - # to be uncommented once qe-gipaw-6.6 is released - # {'filename': 'qe-gipaw-%(version)s.tar.gz', 'download_filename': '%(version)s.tar.gz'}, - {'filename': 'wannier90-3.1.0.tar.gz', 'download_filename': 'v3.1.0.tar.gz'}, -] -checksums = [ - 'de6996b9f1bf480bcd0166d24e918f5ff3c8fdb710f59f781bc9d33819280eb5', # qe-6.6-ReleasePack.tgz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', # wannier90-3.1.0.tar.gz -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('ELPA', '2020.05.001'), - ('libxc', '4.3.4'), -] - -# The third party packages should be installed separately and added as -# dependencies. 
The exception is w90, which is force built, and gipaw -# which depends on qe source -# gipaw is disabled in this build because it doesn't have a version compatible with 6.6 -buildopts = 'all gwl xspectra couple epw w90' # gipaw - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-gomkl-2021.eb b/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-gomkl-2021.eb deleted file mode 100644 index 5c71b76360fd9684984c071567c9362335718763..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-gomkl-2021.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = 'QuantumESPRESSO' -version = '6.7' - -homepage = 'https://www.quantum-espresso.org' -description = """Quantum ESPRESSO is an integrated suite of computer codes -for electronic-structure calculations and materials modeling at the nanoscale. -It is based on density-functional theory, plane waves, and pseudopotentials -(both norm-conserving and ultrasoft). -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = [ - 'https://github.com/QEF/q-e/releases/download/qe-%(version)s.0/', - 'https://github.com/dceresoli/qe-gipaw/archive/', - 'https://github.com/wannier-developers/wannier90/archive/' -] -sources = [ - 'qe-%(version)s-ReleasePack.tgz', - {'filename': 'qe-gipaw-%(version)s.tar.gz', - 'download_filename': '%(version)sMaX.tar.gz'}, - {'filename': 'wannier90-3.1.0.tar.gz', 'download_filename': 'v3.1.0.tar.gz'}, -] -checksums = [ - # qe-6.7-ReleasePack.tgz - '8f06ea31ae52ad54e900a2f51afd5c70f78096d9dcf39c86c2b17dccb1ec9c87', - # qe-gipaw-%(version)s.tar.gz - '95d2ed2f4d27f044dba171bdf8c1913a67ebc8846ed3463462828f2d414a2a61', - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('ELPA', '2020.11.001'), - ('libxc', '5.1.5'), -] - -# The third party packages should be installed separately and added as -# dependencies. The exception is w90, which is force built, and gipaw -# which depends on qe source -buildopts = 'all gwl xspectra couple epw gipaw w90' - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-gpsmkl-2021.eb b/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-gpsmkl-2021.eb deleted file mode 100644 index 70d6a3ddb63eceb5d9980b5cc5cf94fe167ccffd..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-gpsmkl-2021.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = 'QuantumESPRESSO' -version = '6.7' - -homepage = 'https://www.quantum-espresso.org' -description = """Quantum ESPRESSO is an integrated suite of computer codes -for electronic-structure calculations and materials modeling at the nanoscale. -It is based on density-functional theory, plane waves, and pseudopotentials -(both norm-conserving and ultrasoft). 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = [ - 'https://github.com/QEF/q-e/releases/download/qe-%(version)s.0/', - 'https://github.com/dceresoli/qe-gipaw/archive/', - 'https://github.com/wannier-developers/wannier90/archive/' -] -sources = [ - 'qe-%(version)s-ReleasePack.tgz', - {'filename': 'qe-gipaw-%(version)s.tar.gz', - 'download_filename': '%(version)sMaX.tar.gz'}, - {'filename': 'wannier90-3.1.0.tar.gz', 'download_filename': 'v3.1.0.tar.gz'}, -] -checksums = [ - # qe-6.7-ReleasePack.tgz - '8f06ea31ae52ad54e900a2f51afd5c70f78096d9dcf39c86c2b17dccb1ec9c87', - # qe-gipaw-%(version)s.tar.gz - '95d2ed2f4d27f044dba171bdf8c1913a67ebc8846ed3463462828f2d414a2a61', - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('ELPA', '2020.11.001'), - ('libxc', '5.1.5'), -] - -# The third party packages should be installed separately and added as -# dependencies. The exception is w90, which is force built, and gipaw -# which depends on qe source -buildopts = 'all gwl xspectra couple epw gipaw w90' - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-intel-2021.eb b/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-intel-2021.eb deleted file mode 100644 index babee304172770726f40709057106a85ce26e3f3..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-intel-2021.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = 'QuantumESPRESSO' -version = '6.7' - -homepage = 'https://www.quantum-espresso.org' -description = """Quantum ESPRESSO is an integrated suite of computer codes -for electronic-structure calculations and materials modeling at the nanoscale. -It is based on density-functional theory, plane waves, and pseudopotentials -(both norm-conserving and ultrasoft). -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = [ - 'https://github.com/QEF/q-e/releases/download/qe-%(version)s.0/', - 'https://github.com/dceresoli/qe-gipaw/archive/', - 'https://github.com/wannier-developers/wannier90/archive/' -] -sources = [ - 'qe-%(version)s-ReleasePack.tgz', - {'filename': 'qe-gipaw-%(version)s.tar.gz', - 'download_filename': '%(version)sMaX.tar.gz'}, - {'filename': 'wannier90-3.1.0.tar.gz', 'download_filename': 'v3.1.0.tar.gz'}, -] -checksums = [ - # qe-6.7-ReleasePack.tgz - '8f06ea31ae52ad54e900a2f51afd5c70f78096d9dcf39c86c2b17dccb1ec9c87', - # qe-gipaw-%(version)s.tar.gz - '95d2ed2f4d27f044dba171bdf8c1913a67ebc8846ed3463462828f2d414a2a61', - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('ELPA', '2020.11.001'), - ('libxc', '5.1.5'), -] - -# The third party packages should be installed separately and added as -# dependencies. 
The exception is w90, which is force built, and gipaw -# which depends on qe source -buildopts = 'all gwl xspectra couple epw gipaw w90' - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-intel-para-2021.eb b/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-intel-para-2021.eb deleted file mode 100644 index 64b91c8f6d3ae07c84d28653c5c80a5de7ac3f39..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-intel-para-2021.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = 'QuantumESPRESSO' -version = '6.7' - -homepage = 'https://www.quantum-espresso.org' -description = """Quantum ESPRESSO is an integrated suite of computer codes -for electronic-structure calculations and materials modeling at the nanoscale. -It is based on density-functional theory, plane waves, and pseudopotentials -(both norm-conserving and ultrasoft). -""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = [ - 'https://github.com/QEF/q-e/releases/download/qe-%(version)s.0/', - 'https://github.com/dceresoli/qe-gipaw/archive/', - 'https://github.com/wannier-developers/wannier90/archive/' -] -sources = [ - 'qe-%(version)s-ReleasePack.tgz', - {'filename': 'qe-gipaw-%(version)s.tar.gz', - 'download_filename': '%(version)sMaX.tar.gz'}, - {'filename': 'wannier90-3.1.0.tar.gz', 'download_filename': 'v3.1.0.tar.gz'}, -] -checksums = [ - # qe-6.7-ReleasePack.tgz - '8f06ea31ae52ad54e900a2f51afd5c70f78096d9dcf39c86c2b17dccb1ec9c87', - # qe-gipaw-%(version)s.tar.gz - '95d2ed2f4d27f044dba171bdf8c1913a67ebc8846ed3463462828f2d414a2a61', - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('ELPA', '2020.11.001'), - ('libxc', '5.1.5'), -] - -# The third party packages should be installed separately and added as -# dependencies. The exception is w90, which is force built, and gipaw -# which depends on qe source -buildopts = 'all gwl xspectra couple epw gipaw w90' - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-iomkl-2021.eb b/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-iomkl-2021.eb deleted file mode 100644 index 5eebf295493355cd4cab488e754344d6afb59be3..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/QuantumESPRESSO/QuantumESPRESSO-6.7-iomkl-2021.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = 'QuantumESPRESSO' -version = '6.7' - -homepage = 'https://www.quantum-espresso.org' -description = """Quantum ESPRESSO is an integrated suite of computer codes -for electronic-structure calculations and materials modeling at the nanoscale. -It is based on density-functional theory, plane waves, and pseudopotentials -(both norm-conserving and ultrasoft). 
-""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = [ - 'https://github.com/QEF/q-e/releases/download/qe-%(version)s.0/', - 'https://github.com/dceresoli/qe-gipaw/archive/', - 'https://github.com/wannier-developers/wannier90/archive/' -] -sources = [ - 'qe-%(version)s-ReleasePack.tgz', - {'filename': 'qe-gipaw-%(version)s.tar.gz', - 'download_filename': '%(version)sMaX.tar.gz'}, - {'filename': 'wannier90-3.1.0.tar.gz', 'download_filename': 'v3.1.0.tar.gz'}, -] -checksums = [ - # qe-6.7-ReleasePack.tgz - '8f06ea31ae52ad54e900a2f51afd5c70f78096d9dcf39c86c2b17dccb1ec9c87', - # qe-gipaw-%(version)s.tar.gz - '95d2ed2f4d27f044dba171bdf8c1913a67ebc8846ed3463462828f2d414a2a61', - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', -] - -dependencies = [ - ('HDF5', '1.10.6'), - ('ELPA', '2020.11.001'), - ('libxc', '5.1.5'), -] - -# The third party packages should be installed separately and added as -# dependencies. The exception is w90, which is force built, and gipaw -# which depends on qe source -buildopts = 'all gwl xspectra couple epw gipaw w90' - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/q/qcint/qcint-3.0.19-gcccoremkl-10.3.0-2021.2.0.eb b/Golden_Repo/q/qcint/qcint-3.0.19-gcccoremkl-10.3.0-2021.2.0.eb deleted file mode 100644 index d1613466f78eeb081e17489aa849000f7eac99b4..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/qcint/qcint-3.0.19-gcccoremkl-10.3.0-2021.2.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'qcint' -version = '3.0.19' - -homepage = 'http://wiki.sunqm.net/libcint' -description = """libcint is an open source library for analytical Gaussian integrals. -qcint is an optimized libcint branch for the x86-64 platform.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'optarch': False, 'noopt': True} # FIXME failing tests - -source_urls = ['https://github.com/sunqm/qcint/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['b46354becc284be4638d09e26b2002bf73a2be53c1653c6a6755bfb7778202e5'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -separate_build_dir = True - -configopts = "-DENABLE_EXAMPLE=0 -DENABLE_TEST=0" - -buildopts = "VERBOSE=1" - -sanity_check_paths = { - 'files': ['include/cint.h', 'lib/libcint.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/q/qcint/qcint-3.0.19-gcccoremkl-9.3.0-2020.2.254.eb b/Golden_Repo/q/qcint/qcint-3.0.19-gcccoremkl-9.3.0-2020.2.254.eb deleted file mode 100644 index 5add7ac736284cb05330f9a981d98e1de974970e..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/qcint/qcint-3.0.19-gcccoremkl-9.3.0-2020.2.254.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'qcint' -version = '3.0.19' - -homepage = 'http://wiki.sunqm.net/libcint' -description = """libcint is an open source library for analytical Gaussian integrals. 
-qcint is an optimized libcint branch for the x86-64 platform.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'optarch': False, 'noopt': True} # FIXME failing tests - -source_urls = ['https://github.com/sunqm/qcint/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['b46354becc284be4638d09e26b2002bf73a2be53c1653c6a6755bfb7778202e5'] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -separate_build_dir = True - -configopts = "-DENABLE_EXAMPLE=0 -DENABLE_TEST=0" - -buildopts = "VERBOSE=1" - -sanity_check_paths = { - 'files': ['include/cint.h', 'lib/libcint.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/q/qrupdate/qrupdate-1.1.2-GCCcore-10.3.0.eb b/Golden_Repo/q/qrupdate/qrupdate-1.1.2-GCCcore-10.3.0.eb deleted file mode 100644 index d374a0529d8d4b11d4f72ee8546b9fac3e976fc3..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/qrupdate/qrupdate-1.1.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'MakeCp' - -name = 'qrupdate' -version = '1.1.2' - -homepage = 'https://sourceforge.net/projects/qrupdate/' -description = """qrupdate is a Fortran library for fast updates of QR and Cholesky decompositions.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCE_TAR_GZ] -source_urls = [SOURCEFORGE_SOURCE] - -patches = ['%(name)s-%(version)s_makeconf.patch'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -buildopts = 'lib' - -files_to_copy = [(['libqrupdate.a'], 'lib')] - -sanity_check_paths = { - 'files': ['lib/libqrupdate.a'], - 'dirs': [], -} - -parallel = 1 - -moduleclass = 'numlib' diff --git a/Golden_Repo/q/qrupdate/qrupdate-1.1.2-GCCcore-9.3.0.eb b/Golden_Repo/q/qrupdate/qrupdate-1.1.2-GCCcore-9.3.0.eb deleted file mode 100644 index 34018ae3b1ed3a1fe5dd602c68b52885e494578d..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/qrupdate/qrupdate-1.1.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'MakeCp' - -name = 'qrupdate' -version = '1.1.2' - -homepage = 'https://sourceforge.net/projects/qrupdate/' -description = """qrupdate is a Fortran library for fast updates of QR and Cholesky decompositions.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCE_TAR_GZ] -source_urls = [SOURCEFORGE_SOURCE] - -patches = ['%(name)s-%(version)s_makeconf.patch'] - -builddependencies = [ - ('binutils', '2.34') -] - -buildopts = 'lib' - -files_to_copy = [(['libqrupdate.a'], 'lib')] - -sanity_check_paths = { - 'files': ['lib/libqrupdate.a'], - 'dirs': [], -} - -parallel = 1 - -moduleclass = 'numlib' diff --git a/Golden_Repo/q/qrupdate/qrupdate-1.1.2_makeconf.patch b/Golden_Repo/q/qrupdate/qrupdate-1.1.2_makeconf.patch deleted file mode 100644 index 8e0f5c03c6296679d934b64ce3ff9d510808b1c9..0000000000000000000000000000000000000000 --- a/Golden_Repo/q/qrupdate/qrupdate-1.1.2_makeconf.patch +++ /dev/null @@ -1,17 +0,0 @@ -# Pick FC and FFLAGS from environment variables -# March 8th 2016 B. Hajgato (Free University Brussels - VUB) ---- qrupdate-1.1.2/Makeconf.old 2010-01-19 12:35:49.000000000 +0100 -+++ qrupdate-1.1.2/Makeconf 2016-03-08 20:51:11.662603099 +0100 -@@ -1,9 +1,9 @@ - # set this to your compiler's executable name (e.g. 
gfortran, g77) --FC=gfortran -+FC?=gfortran - # requested flags --FFLAGS=-fimplicit-none -O3 -funroll-loops -+FFLAGS?=-fimplicit-none -O3 -funroll-loops - # set if you need shared library --FPICFLAGS=-fPIC -+FPICFLAGS= - - # BLAS library (only required for tests) - BLAS=-lblas diff --git a/Golden_Repo/r/R/R-4.0.2-gcccoremkl-9.3.0-2020.2.254-nompi.eb b/Golden_Repo/r/R/R-4.0.2-gcccoremkl-9.3.0-2020.2.254-nompi.eb deleted file mode 100644 index 719838a272dd976c03dd31536459a7af69cde72a..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/R/R-4.0.2-gcccoremkl-9.3.0-2020.2.254-nompi.eb +++ /dev/null @@ -1,2565 +0,0 @@ -name = 'R' -version = '4.0.2' -versionsuffix = '-nompi' - -homepage = 'https://www.r-project.org/' -description = """R is a free software environment for statistical computing - and graphics.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -source_urls = ['https://cloud.r-project.org/src/base/R-%(version_major)s'] -sources = [SOURCE_TAR_GZ] -checksums = ['d3bceab364da0876625e4097808b42512395fdf41292f4915ab1fd257c1bbe75'] - -builddependencies = [ - ('pkg-config', '0.29.2'), -] -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('cairo', '1.17.2'), - ('libreadline', '8.0'), - ('ncurses', '6.2'), - ('bzip2', '1.0.8'), - ('XZ', '5.2.5'), - ('zlib', '1.2.11'), - ('SQLite', '3.32.3'), - ('PCRE2', '10.34'), - ('libpng', '1.6.37'), # for plotting in R - ('libjpeg-turbo', '2.0.5'), # for plotting in R - ('LibTIFF', '4.1.0'), - ('Java', '1.8', '', SYSTEM), - ('Tk', '8.6.10'), # for tcltk - ('cURL', '7.71.1'), # for RCurl - ('libxml2', '2.9.10'), # for XML - ('PROJ', '7.1.0'), # for rgdal - ('GMP', '6.2.0'), # for igraph - ('NLopt', '2.6.2'), # for nloptr - ('FFTW', '3.3.8', '-nompi'), # for fftw - ('libsndfile', '1.0.28'), # for seewave - ('ICU', '67.1'), # for rJava & gdsfmt - ('HDF5', '1.10.6', '-serial'), # for hdf5r - ('UDUNITS', '2.2.26'), # for units - ('GSL', '2.6'), # for RcppGSL - ('ImageMagick', '7.0.10-25'), # for animation - ('GLPK', '4.65'), # for Rglpk - ('netCDF', '4.7.4', '-serial'), # the ncdf4 package needs it - ('GEOS', '3.8.1', '-Python-3.8.5'), # for rgeos - ('ZeroMQ', '4.3.3'), # for pbdZMQ needed by IRkernel - - # OS dependency should be preferred if the OS version is more recent than - # this version; it's nice to have an up-to-date OpenSSL for security - # reasons - # ('OpenSSL', '1.1.1e'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -preconfigopts = 'export LDFLAGS="$LDFLAGS -lm" && ' - -configopts = "--with-pic --enable-threads --enable-R-shlib" -# some recommended packages may fail in a parallel build (e.g. Matrix), and -# we're installing them anyway below -configopts += " --with-recommended-packages=no" - -# specify that at least EasyBuild v3.5.0 is required, -# since we rely on the updated easyblock for R to configure correctly w.r.t. -# BLAS/LAPACK -easybuild_version = '4.2.2' - -exts_default_options = { - 'source_urls': [ - 'https://cran.r-project.org/src/contrib/Archive/%(name)s', # package archive - 'https://cran.r-project.org/src/contrib/', # current version of packages - 'https://cran.freestatistics.org/src/contrib', # mirror alternative for current packages - ], - 'source_tmpl': '%(name)s_%(version)s.tar.gz', -} - -# !! order of packages is important !! 
-# packages updated on 23 July 2020 -exts_list = [ - 'base', - 'datasets', - 'graphics', - 'grDevices', - 'grid', - 'methods', - 'splines', - 'stats', - 'stats4', - 'tools', - 'utils', - 'parallel', - ('abind', '1.4-5', { - 'checksums': ['3a3ace5afbcb86e56889efcebf3bf5c3bb042a282ba7cc4412d450bb246a3f2c'], - }), - ('magic', '1.5-9', { - 'checksums': ['fa1d5ef2d39e880f262d31b77006a2a7e76ea38e306aae4356e682b90d6cd56a'], - }), - ('Rcpp', '1.0.4.6', { - 'checksums': ['45af675ddbbe155e671453b2e84fe32250bb98d4ccb4342b61c1e25cff10b302'], - }), - ('RcppProgress', '0.4.2', { - 'checksums': ['b1624b21b7aeb1dafb30f092b2a4bef4c3504efd2d6b00b2cdf55dc9df194b48'], - }), - ('lpSolve', '5.6.15', { - 'checksums': ['4627be4178abad34fc85a7d264c2eb5e27506f007e46687b0b8a4f8fbdf4f3ba'], - }), - ('linprog', '0.9-2', { - 'checksums': ['8937b2e30692e38de1713f1513b78f505f73da6f5b4a576d151ad60bac2221ce'], - }), - ('geometry', '0.4.5', { - 'checksums': ['8fedd17c64468721d398e3c17a39706321ab71098b29f5e8d8039dd115a220d8'], - }), - ('bit', '1.1-15.2', { - 'checksums': ['0b83e78385293d6cdc0189a07fcc3f9f9db286c8c4af3288467f5257e79cb28b'], - }), - ('filehash', '2.4-2', { - 'checksums': ['b6d056f75d45e315943a4618f5f62802612cd8931ba3f9f474b595140a3cfb93'], - }), - ('ff', '2.2-14.2', { - 'checksums': ['f8c06ac333ffe3545cdf56531619865176e1827728f7aeeba82d135d5c5e37e4'], - }), - ('bnlearn', '4.5', { - 'checksums': ['a8047625533260a855d309b3c0785cbeec0f9ec13f284b6664a1f61638138578'], - }), - ('bootstrap', '2019.6', { - 'checksums': ['5252fdfeb944cf1fae35016d35f9333b1bd1fc8c6d4a14e33901160e21968694'], - }), - ('combinat', '0.0-8', { - 'checksums': ['1513cf6b6ed74865bfdd9f8ca58feae12b62f38965d1a32c6130bef810ca30c1'], - }), - ('deal', '1.2-39', { - 'checksums': ['a349db8f1c86cbd8315c068da49314ce9eb585dbb50d2e5ff09300506bd8806b'], - }), - ('fdrtool', '1.2.15', { - 'checksums': ['65f964aa768d0703ceb7a199adc5e79ca79a6d29d7bc053a262eb533697686c0'], - }), - ('formatR', '1.7', { - 'checksums': ['a366621b3ff5f8e86a499b6f87858ad47eefdace138341b1377ecc307a5e5ddb'], - }), - ('gtools', '3.8.2', { - 'checksums': ['503ba60a41f3c61b8129c25de62c74dab29761d2e661d4addd106e2e02f1dcde'], - }), - ('gdata', '2.18.0', { - 'checksums': ['4b287f59f5bbf5fcbf18db16477852faac4a605b10c5284c46b93fa6e9918d7f'], - }), - ('GSA', '1.03.1', { - 'checksums': ['e192d4383f53680dbd556223ea5f8cad6bae62a80a337ba5fd8d05a8aee6a917'], - }), - ('highr', '0.8', { - 'checksums': ['4bd01fba995f68c947a99bdf9aca15327a5320151e10bd0326fad50a6d8bc657'], - }), - ('infotheo', '1.2.0', { - 'checksums': ['9b47ebc3db5708c88dc014b4ffec6734053a9c255a9241fcede30fec3e63aaa3'], - }), - ('lars', '1.2', { - 'checksums': ['64745b568f20b2cfdae3dad02fba92ebf78ffee466a71aaaafd4f48c3921922e'], - }), - ('lazy', '1.2-16', { - 'checksums': ['c796c8b987ed1bd9dfddd593e17312ed681fc4fa3a1ecfe51da2def0ac1e50df'], - }), - ('kernlab', '0.9-29', { - 'checksums': ['c3da693a0041dd34f869e7b63a8d8cf7d4bc588ac601bcdddcf7d44f68b3106f'], - }), - ('mime', '0.9', { - 'checksums': ['2ccf97d2940a09539dc051c7a9a1aee90ef04b34e9bc6c0b64b4435fb3c2fa80'], - }), - ('xfun', '0.13', { - 'checksums': ['a3da8d53b74ae58bb0f121177dcf3caf312c65fc181c18f168abd59afac33e0e'], - }), - ('markdown', '1.1', { - 'checksums': ['8d8cd47472a37362e615dbb8865c3780d7b7db694d59050e19312f126e5efc1b'], - }), - ('mlbench', '2.1-1', { - 'checksums': ['748141d56531a39dc4d37cf0a5165a40b653a04c507e916854053ed77119e0e6'], - }), - ('NLP', '0.2-0', { - 'checksums': ['fc64c80124c4e53b20f92b60c68e2fd33ee189653d0ceea410c32dd66d9e7075'], - }), - ('mclust', 
'5.4.6', { - 'checksums': ['d4ffcf36bf709ad42dccb2387263f67ca32012b0707f0ef6eda32730b5c286fc'], - }), - ('RANN', '2.6.1', { - 'checksums': ['b299c3dfb7be17aa41e66eff5674fddd2992fb6dd3b10bc59ffbf0c401697182'], - }), - ('rmeta', '3.0', { - 'checksums': ['b9f9d405935cffcd7a5697ff13b033f9725de45f4dc7b059fd68a7536eb76b6e'], - }), - ('segmented', '1.1-0', { - 'checksums': ['d081d0efaec708d717bf1248ba3df099876389c22796aad676655efb706e9d19'], - }), - ('som', '0.3-5.1', { - 'checksums': ['a6f4c0e5b36656b7a8ea144b057e3d7642a8b71972da387a7133f3dd65507fb9'], - }), - ('SuppDists', '1.1-9.5', { - 'checksums': ['680b67145c07d44e200275e08e48602fe19cd99fb106c05422b3f4a244c071c4'], - }), - ('stabledist', '0.7-1', { - 'checksums': ['06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69'], - }), - ('survivalROC', '1.0.3', { - 'checksums': ['1449e7038e048e6ad4d3f7767983c0873c9c7a7637ffa03a4cc7f0e25c31cd72'], - }), - ('pspline', '1.0-18', { - 'checksums': ['f71cf293bd5462e510ac5ad16c4a96eda18891a0bfa6447dd881c65845e19ac7'], - }), - ('timeDate', '3043.102', { - 'checksums': ['377cba03cddab8c6992e31d0683c1db3a73afa9834eee3e95b3b0723f02d7473'], - }), - ('longmemo', '1.1-2', { - 'checksums': ['7964e982287427dd58f98e1144e468ae0cbd572d25a4bea6ca9ae9c7522f3207'], - }), - ('ADGofTest', '0.3', { - 'checksums': ['9cd9313954f6ecd82480d373f6c5371ca84ab33e3f5c39d972d35cfcf1096846'], - }), - ('MASS', '7.3-51.6', { - 'checksums': ['e2035c47d8428b52afc02dd4f87ccb39a0085629932dfaff5f0a5d2c84ef3eee'], - }), - ('pixmap', '0.4-11', { - 'checksums': ['6fa010749a59cdf56aad9f81271473b7d55697036203f2cd5d81372bcded7412'], - }), - ('lattice', '0.20-41', { - 'checksums': ['54ca557f0cb33df60eb10b883c2ed2847e061ddd57ed9b5dd7695149609d57b5'], - }), - ('sp', '1.4-1', { - 'checksums': ['8f96f1a4827eea2cc02bb35c418ce2bdaf2d5ea47a214bcd4bc85f928b417039'], - }), - ('ade4', '1.7-15', { - 'checksums': ['3286fa7d8c372a5596e82c970c058e4cca1139a09935f14b238ba38aa9fdfdf6'], - }), - ('AlgDesign', '1.2.0', { - 'checksums': ['ff86c9e19505770520e7614970ad19c698664d08001ce888b8603e44c2a3b52a'], - }), - ('base64enc', '0.1-3', { - 'checksums': ['6d856d8a364bcdc499a0bf38bfd283b7c743d08f0b288174fba7dbf0a04b688d'], - }), - ('BH', '1.72.0-3', { - 'checksums': ['888ec1a3316bb69e1ba749b08ba7e0903ebc4742e3a185de8d148c13cddac8ab'], - }), - ('brew', '1.0-6', { - 'checksums': ['d70d1a9a01cf4a923b4f11e4374ffd887ad3ff964f35c6f9dc0f29c8d657f0ed'], - }), - ('Brobdingnag', '1.2-6', { - 'checksums': ['19eccaed830ce9d93b70642f6f126ac66722a98bbd48586899cc613dd9966ad4'], - }), - ('corpcor', '1.6.9', { - 'checksums': ['2e4fabd1d3936fecea67fa365233590147ca50bb45cf80efb53a10345a8a23c2'], - }), - ('longitudinal', '1.1.12', { - 'checksums': ['d4f894c38373ba105b1bdc89e3e7c1b215838e2fb6b4470b9f23768b84e603b5'], - }), - ('backports', '1.1.6', { - 'checksums': ['d0e8af477514d81b46cc777e0fa532835c1dc7eecd3c2432bb40228131bc199c'], - }), - ('checkmate', '2.0.0', { - 'checksums': ['0dc25b0e20c04836359df1885d099c6e4ad8ae0e585a9e4107f7ea945d9c6fa4'], - }), - ('cubature', '2.0.4', { - 'checksums': ['d97ce5eaac5e43910208e8274ddf6ff4f974d05688f0247ebccd807e24c2fe4a'], - }), - ('DEoptimR', '1.0-8', { - 'checksums': ['846911c1b2561a9fae73a8c60a21a5680963ebb0050af3c1f1147ae9a121e5ef'], - }), - ('digest', '0.6.25', { - 'checksums': ['15ccadb7b8bccaa221b6700bb549011719d0f4b38dbd3a1f29face3e019e2de5'], - }), - ('fastmatch', '1.1-0', { - 'checksums': ['20b51aa4838dbe829e11e951444a9c77257dcaf85130807508f6d7e76797007d'], - }), - ('ffbase', '0.12.8', { - 'checksums': 
['18622f799641fb624dc274cdd31c52c9bd77c8f1f63fbb1dc636be80673b5356'], - }), - ('iterators', '1.0.12', { - 'checksums': ['96bf31d60ebd23aefae105d9b7790715e63327eec0deb2ddfb3d543994ea9f4b'], - }), - ('maps', '3.3.0', { - 'checksums': ['199afe19a4edcef966ae79ef802f5dcc15a022f9c357fcb8cae8925fe8bd2216'], - }), - ('nnls', '1.4', { - 'checksums': ['0e5d77abae12bc50639d34354f96a8e079408c9d7138a360743b73bd7bce6c1f'], - }), - ('sendmailR', '1.2-1', { - 'checksums': ['04feb08c6c763d9c58b2db24b1222febe01e28974eac4fe87670be6fb9bff17c'], - }), - ('dotCall64', '1.0-0', { - 'checksums': ['69318dc6b8aecc54d4f789c8105e672198363b395f1a764ebaeb54c0473d17ad'], - }), - ('spam', '2.5-1', { - 'checksums': ['d145881a0d48351ce88678a57862c0d0f716d98f3166f6338d954acacc51c067'], - }), - ('subplex', '1.6', { - 'checksums': ['0d05da1622fffcd20a01cc929fc6c2b7df40a8246e7018f7f1f3c175b774cbf9'], - }), - ('stringi', '1.4.6', { - 'checksums': ['633f67da5bd7bcb611764e4f478b0da050d22a715bbcbdd67aed0300dcef6fd6'], - }), - ('magrittr', '1.5', { - 'checksums': ['05c45943ada9443134caa0ab24db4a962b629f00b755ccf039a2a2a7b2c92ae8'], - }), - ('glue', '1.4.0', { - 'checksums': ['ea6c409f7141754baa090deba96cff270a11b185452cf9e6fb69cb148a9069c1'], - }), - ('stringr', '1.4.0', { - 'checksums': ['87604d2d3a9ad8fd68444ce0865b59e2ffbdb548a38d6634796bbd83eeb931dd'], - }), - ('evaluate', '0.14', { - 'checksums': ['a8c88bdbe4e60046d95ddf7e181ee15a6f41cdf92127c9678f6f3d328a3c5e28'], - }), - ('logspline', '2.1.15', { - 'checksums': ['dfe0c89a2ae219d121ea7af788dd994097f42d2ff39f4f86f5c4288a4ec0f71e'], - }), - ('ncbit', '2013.03.29', { - 'checksums': ['4480271f14953615c8ddc2e0666866bb1d0964398ba0fab6cc29046436820738'], - }), - ('permute', '0.9-5', { - 'checksums': ['d2885384a07497e8df273689d6713fc7c57a7c161f6935f3572015e16ab94865'], - }), - ('plotrix', '3.7-8', { - 'checksums': ['8ccd1f7e656413b9956cea614c986ce9cc61366deba356afb38cee6672a59480'], - }), - ('randomForest', '4.6-14', { - 'checksums': ['f4b88920419eb0a89d0bc5744af0416d92d112988702dc726882394128a8754d'], - }), - ('scatterplot3d', '0.3-41', { - 'checksums': ['4c8326b70a3b2d37126ca806771d71e5e9fe1201cfbe5b0d5a0a83c3d2c75d94'], - }), - ('SparseM', '1.78', { - 'checksums': ['d6b79ec881a10c91cb03dc23e6e783080ded9db4f2cb723755aa0d7d29a8b432'], - }), - ('tripack', '1.3-9', { - 'checksums': ['2b40670c23894b12e86a36fb2f42cab4728c8af8bd5338e94fbf86b7916a8c10'], - }), - ('R6', '2.4.1', { - 'checksums': ['26b0fd64827655c28c903f7ff623e839447387f3ad9b04939a02f41ac82faa3e'], - }), - ('irace', '3.4.1', { - 'checksums': ['7eea92ba42e6ba320fa8bdca3c53091ae42f26a0f097244f65e7e117f6d514b6'], - }), - ('rJava', '0.9-12', { - 'checksums': ['2248a8c73cacfecf75445ad0ebda4960409ec3f21afb180a1bc02a6de4057b0f'], - }), - ('RColorBrewer', '1.1-2', { - 'checksums': ['f3e9781e84e114b7a88eb099825936cc5ae7276bbba5af94d35adb1b3ea2ccdd'], - }), - ('png', '0.1-7', { - 'checksums': ['e269ff968f04384fc9421d17cfc7c10cf7756b11c2d6d126e9776f5aca65553c'], - }), - ('jpeg', '0.1-8.1', { - 'checksums': ['1db0a4976fd9b2ae27a37d3e856cca35bc2909323c7a40724846a5d3c18915a9'], - }), - ('latticeExtra', '0.6-29', { - 'checksums': ['6cadc31d56f73d926e2e8d72e43ae17ac03607a4d1a374719999a4a231e3df11'], - }), - ('Matrix', '1.2-18', { - 'checksums': ['f7ff018c2811946767ffd4c96d3987e859b82786ff72e1c211ab18bc03cb6119'], - }), - ('RcppArmadillo', '0.9.870.2.0', { - 'checksums': ['bb605d6702d49d447b18986d9a7864b27e5512630a6713bca67afbb5bb54f1e4'], - }), - ('plyr', '1.8.6', { - 'checksums': 
['ea55d26f155443e9774769531daa5d4c20a0697bb53abd832e891b126c935287'], - }), - ('gtable', '0.3.0', { - 'checksums': ['fd386cc4610b1cc7627dac34dba8367f7efe114b968503027fb2e1265c67d6d3'], - }), - ('reshape2', '1.4.4', { - 'checksums': ['d88dcf9e2530fa9695fc57d0c78adfc5e361305fe8919fe09410b17da5ca12d8'], - }), - ('dichromat', '2.0-0', { - 'checksums': ['31151eaf36f70bdc1172da5ff5088ee51cc0a3db4ead59c7c38c25316d580dd1'], - }), - ('colorspace', '1.4-1', { - 'checksums': ['693d713a050f8bfecdb7322739f04b40d99b55aed168803686e43401d5f0d673'], - }), - ('munsell', '0.5.0', { - 'checksums': ['d0f3a9fb30e2b5d411fa61db56d4be5733a2621c0edf017d090bdfa5e377e199'], - }), - ('labeling', '0.3', { - 'checksums': ['0d8069eb48e91f6f6d6a9148f4e2dc5026cabead15dd15fc343eff9cf33f538f'], - }), - ('viridisLite', '0.3.0', { - 'checksums': ['780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af'], - }), - ('farver', '2.0.3', { - 'checksums': ['0e1590df79ec6078f10426411b96216b70568a4eaf3ffd84ca723add0ed8e5cc'], - }), - ('rlang', '0.4.5', { - 'checksums': ['cd1fac76f1a6ac26e07cc11dd08c55947fe152e4703daf8c94e3a650721b10a8'], - }), - ('lifecycle', '0.2.0', { - 'checksums': ['29746e8dee05d4e36f9c612e8c7a903a4f648a36b3b94c9776e518c38a412224'], - }), - ('scales', '1.1.0', { - 'checksums': ['1ee4a6fd1dbc5f52fe57dd8cce8caee4ce2fecb02d4e7d519e83f15aa45b2d03'], - }), - ('assertthat', '0.2.1', { - 'checksums': ['85cf7fcc4753a8c86da9a6f454e46c2a58ffc70c4f47cac4d3e3bcefda2a9e9f'], - }), - ('crayon', '1.3.4', { - 'checksums': ['fc6e9bf990e9532c4fcf1a3d2ce22d8cf12d25a95e4779adfa17713ed836fa68'], - }), - ('fansi', '0.4.1', { - 'checksums': ['3c69eec803a3827e5227f9cf084976eeb738b22c7eb7665bb5faa251bce41e09'], - }), - ('cli', '2.0.2', { - 'checksums': ['490834e5b80eb036befa0e150996bcab1c4d5d168c3d45209926e52d0d5413b6'], - }), - ('utf8', '1.1.4', { - 'checksums': ['f6da9cadfc683057d45f54b43312a359cf96ec2731c0dda18a8eae31d1e31e54'], - }), - ('zeallot', '0.1.0', { - 'checksums': ['439f1213c97c8ddef9a1e1499bdf81c2940859f78b76bc86ba476cebd88ba1e9'], - }), - ('ellipsis', '0.3.0', { - 'checksums': ['0bf814cb7a1f0ee1f2949bdc98752a0d535f2a9489280dd4d8fcdb10067ee907'], - }), - ('vctrs', '0.2.4', { - 'checksums': ['dcc8b6bfd2d951d48d338a3d4deaaabfee356c0ee43169a6d6b06ea78cfe4f97'], - }), - ('pillar', '1.4.3', { - 'checksums': ['5a8bc40bd836baab80f70fba268e3f530fb464a9268ad99f1c037380f83f560b'], - }), - ('pkgconfig', '2.0.3', { - 'checksums': ['330fef440ffeb842a7dcfffc8303743f1feae83e8d6131078b5a44ff11bc3850'], - }), - ('tibble', '3.0.1', { - 'checksums': ['154552cfb767e0bd48ef3fc61df8286ce52205e970815c5bc2560f41eceea79e'], - }), - ('lazyeval', '0.2.2', { - 'checksums': ['d6904112a21056222cfcd5eb8175a78aa063afe648a562d9c42c6b960a8820d4'], - }), - ('withr', '2.3.0', { - 'checksums': ['2cc03c9947d424717e94f301a0ab7d97eb2079eea5c6a0a3cdf2da32aedc67a0'], - }), - ('nlme', '3.1-147', { - 'checksums': ['efc8d82e5758c4696458f647f2b8bb1776fdbff6c8eaad3fe3c0573a3c1f0ce7'], - }), - ('mgcv', '1.8-31', { - 'checksums': ['736de462a0ac43a6ed38cd57dfb0ba2942c941dfbb538128782727ab7125c3c5'], - }), - ('rprojroot', '1.3-2', { - 'checksums': ['df5665834941d8b0e377a8810a04f98552201678300f168de5f58a587b73238b'], - }), - ('desc', '1.2.0', { - 'checksums': ['e66fb5d4fc7974bc558abcdc107a1f258c9177a29dcfcf9164bc6b33dd08dae8'], - }), - ('ps', '1.3.2', { - 'checksums': ['89f2456af6c7ffbc151aeccb620584ca26015b7d6188eb188488c7c4afc14704'], - }), - ('processx', '3.4.2', { - 'checksums': ['94a0ffc632759be85d13b7b11ed006adf6c08c2d9cd99612cd0372833bd75c09'], - }), - 
('callr', '3.4.3', { - 'checksums': ['01b7277f20c1d662c6bebbfa2798d179922b36d4148b4298853579aeda0382b5'], - }), - ('prettyunits', '1.1.1', { - 'checksums': ['9a199aa80c6d5e50fa977bc724d6e39dae1fc597a96413053609156ee7fb75c5'], - }), - ('pkgbuild', '1.0.7', { - 'checksums': ['29bb38a38202ba780d2d46aeca0a6e2f052653e4a83891ec38d19bebd131a971'], - }), - ('rstudioapi', '0.13', { - 'checksums': ['aac35bbdcb4a8e8caba943bc8a2b98120e8940b80cd1020224bb1a26ff776d8b'], - }), - ('pkgload', '1.0.2', { - 'checksums': ['3186564e690fb05eabe76e1ac0bfd4312562c3ac8794b29f8850399515dcf27c'], - }), - ('praise', '1.0.0', { - 'checksums': ['5c035e74fd05dfa59b03afe0d5f4c53fbf34144e175e90c53d09c6baedf5debd'], - }), - ('testthat', '2.3.2', { - 'checksums': ['1a268d8df07f7cd8d282d03bb96ac2d96a24a95c9aa52f4cca5138a09dd8e06c'], - }), - ('isoband', '0.2.1', { - 'checksums': ['18883606bea8352e04a4618bea4e5c9833269e73a46b50bc006dddf4c8b6b4d9'], - }), - ('ggplot2', '3.3.0', { - 'checksums': ['320e3c76fe0d0397e29f4782bf85af3647518154b3900a39fd18cf024c554148'], - }), - ('pROC', '1.16.2', { - 'checksums': ['b68b960ed9a2cdea7976943649082c3945e370d14115b7adbce440fc7f51fc2a'], - }), - ('quadprog', '1.5-8', { - 'checksums': ['22128dd6b08d3516c44ff89276719ad4fe46b36b23fdd585274fa3a93e7a49cd'], - }), - ('BB', '2019.10-1', { - 'checksums': ['04d0b6ce6e5f070b109478a6005653dbe78613bb4e3ea4903203d851b5d3c94d'], - }), - ('BBmisc', '1.11', { - 'checksums': ['1ea48c281825349d8642a661bb447e23bfd651db3599bf72593bfebe17b101d2'], - }), - ('fail', '1.3', { - 'checksums': ['ede8aa2a9f2371aff5874cd030ac625adb35c33954835b54ab4abf7aeb34d56d'], - }), - ('rlecuyer', '0.3-5', { - 'checksums': ['4723434ff7624d4f404a6854ffa0673fc43daa46f58f064dbeeaa17da28ab626'], - }), - ('snow', '0.4-3', { - 'checksums': ['8512537daf334ea2b8074dbb80cf5e959a403a78d68bc1e97664e8a4f64576d8'], - }), - ('tree', '1.0-40', { - 'checksums': ['ffab16382d7ed5b76529801ab26b4970363b2072231c6a87330326298ce626e7'], - }), - ('pls', '2.7-2', { - 'checksums': ['67e91e36dbebeb2f2d9c9b88f310dc00f70de275e5f382f392e72dd36af42b88'], - }), - ('class', '7.3-17', { - 'checksums': ['be1f85b6df7556db93f50cb08106aac6620d4b5bb3fee846422863a022461313'], - }), - ('e1071', '1.7-3', { - 'checksums': ['bb2dba526b673ec3a573befe365e3500b773593f0384fd6694e0835496bcc25d'], - }), - ('nnet', '7.3-14', { - 'checksums': ['5d1b9e9764d74d16c651f18f949aa4e9e2995ba64633cbfa2c6a7355ae30f4af'], - }), - ('minqa', '1.2.4', { - 'checksums': ['cfa193a4a9c55cb08f3faf4ab09c11b70412523767f19894e4eafc6e94cccd0c'], - }), - ('RcppEigen', '0.3.3.7.0', { - 'checksums': ['62ea627284425bfdb56613bc315cca492ed3483a56a03c1f9dc9821a25c3e8ac'], - }), - ('MatrixModels', '0.4-1', { - 'checksums': ['fe878e401e697992a480cd146421c3a10fa331f6b37a51bac83b5c1119dcce33'], - }), - ('quantreg', '5.55', { - 'checksums': ['cbe1541409aed8222a41043958ab9c352b84dba4e0766b54bf1eac59d2454cfe'], - }), - ('robustbase', '0.93-6', { - 'checksums': ['ea1463a646a0aad0cc6f48e011c8baf990178f1228e0759be63259123b3a24b3'], - }), - ('zoo', '1.8-7', { - 'checksums': ['9e072ddc8f245adcdeb230cbf0c818db9c028c320894f48211758da2bf2085f0'], - }), - ('lmtest', '0.9-37', { - 'checksums': ['ddc929f94bf055974832fa4a20fdd0c1eb3a84ee11f716c287936f2141d5ca0a'], - }), - ('vcd', '1.4-7', { - 'checksums': ['ec89b2ad202b89d70344b49d9410ddc8a15dced00462ed7e6f9b516811325299'], - }), - ('snowfall', '1.84-6.1', { - 'checksums': ['5c446df3a931e522a8b138cf1fb7ca5815cc82fcf486dbac964dcbc0690e248d'], - }), - ('rpart', '4.1-15', { - 'checksums': 
['2b8ebe0e9e11592debff893f93f5a44a6765abd0bd956b0eb1f70e9394cfae5c'], - }), - ('survival', '3.1-12', { - 'checksums': ['b62ed66eb646f3df13f7e9bf6571e3bfecae128c66491e174c8833cbef1bf21f'], - }), - ('bindr', '0.1.1', { - 'checksums': ['7c785ca77ceb3ab9282148bcecf64d1857d35f5b800531d49483622fe67505d0'], - }), - ('plogr', '0.2.0', { - 'checksums': ['0e63ba2e1f624005fe25c67cdd403636a912e063d682eca07f2f1d65e9870d29'], - }), - ('bindrcpp', '0.2.2', { - 'checksums': ['48130709eba9d133679a0e959e49a7b14acbce4f47c1e15c4ab46bd9e48ae467'], - }), - ('purrr', '0.3.4', { - 'checksums': ['23ebc93bc9aed9e7575e8eb9683ff4acc0270ef7d6436cc2ef4236a9734840b2'], - }), - ('tidyselect', '1.0.0', { - 'checksums': ['fe761766d03af86d04da9a9a7800e9c8271d2cb067776cfb817d853725d59caf'], - }), - ('dplyr', '0.8.5', { - 'checksums': ['5750d3bf4bda7b5448e08af264ed183b4f7bd0c59a9d828fe9dd399b14590218'], - }), - ('tidyr', '1.0.2', { - 'checksums': ['2403dd2f4d350d1ecb449be5d61cdccf6655572a807409325f9122c716924e69'], - }), - ('mnormt', '1.5-6', { - 'checksums': ['2951fcc9711d577d7d5577d313875c4ed9f7a7a06df8c3b62fa27c231f021e4d'], - }), - ('foreign', '0.8-79', { - 'checksums': ['af36b6945afbf849543ef7e432e27fecb66d5a8b3932b2428e59c2392699b9b4'], - }), - ('psych', '1.9.12.31', { - 'checksums': ['25e71dbe0b0d8211e7a9c8439c1c205a25fd571a0f95a89b8425b87b95b9290a'], - }), - ('generics', '0.0.2', { - 'checksums': ['71b3d1b719ce89e71dd396ac8bc6aa5f1cd99bbbf03faff61dfbbee32fec6176'], - }), - ('broom', '0.5.6', { - 'checksums': ['0b06f670ace7349b95a71313c360dbded2428d65d17eaacccc93750196964335'], - }), - ('nloptr', '1.2.2.1', { - 'checksums': ['d037bea484725cf6cbc069eee17db17f1bc8fc4edc1f1ca16cf6c34bc21610ae'], - }), - ('boot', '1.3-25', { - 'checksums': ['464835fcb453072346ce49e4ae318e04c9dba682349be49db616623b6088fbbe'], - }), - ('statmod', '1.4.34', { - 'checksums': ['1a81c286e099d2395e39f47f1e87295b8e1d64b64ec55bb09bc817ae8879747a'], - }), - ('lme4', '1.1-23', { - 'checksums': ['99d542b1f78fae33a64f1b8eec33b7a4532a8d82d2ac47bdb2838248f14c0262'], - }), - ('ucminf', '1.1-4', { - 'checksums': ['a2eb382f9b24e949d982e311578518710f8242070b3aa3314a331c1e1e7f6f07'], - }), - ('numDeriv', '2016.8-1.1', { - 'checksums': ['d8c4d19ff9aeb31b0c628bd4a16378e51c1c9a3813b525469a31fe89af00b345'], - }), - ('ordinal', '2019.12-10', { - 'checksums': ['7a41e7b7e852a8fa3e911f8859d36e5709ccec5ca42ee3de14a813b7aaac7725'], - }), - ('jomo', '2.6-10', { - 'checksums': ['4063d48e259e936dc0bd9dc616a09043f695703848cb1bf8faa08c07922034cd'], - }), - ('hms', '0.5.3', { - 'checksums': ['4e2b67c8cf65fe86179f24f42d82b3ca9377d5907837bda98b4fc6c2318853ad'], - }), - ('clipr', '0.7.0', { - 'checksums': ['03a4e4b72ec63bd08b53fe62673ffc19a004cc846957a335be2b30d046b8c2e2'], - }), - ('readr', '1.3.1', { - 'checksums': ['33f94de39bb7f2a342fbb2bd4e5afcfec08798eac39672ee18042ac0b349e4f3'], - }), - ('forcats', '0.5.0', { - 'checksums': ['8f960e789333ec597ddf2d653a64e330f03b86f465e9b71f6779f227355d90c4'], - }), - ('haven', '2.2.0', { - 'checksums': ['199ee9b14e1ff70a0b0c3b9ce33dfdec8ed3b5e857a2a36bfb82e78a7b352d3d'], - }), - ('pan', '1.6', { - 'checksums': ['adc0df816ae38bc188bce0aef3aeb71d19c0fc26e063107eeee71a81a49463b6'], - }), - ('mitml', '0.3-7', { - 'checksums': ['c6f796d0059f1b093b599a89d955982fa257de9c45763ecc2cbbce10fdec1e7b'], - }), - ('mice', '3.8.0', { - 'checksums': ['04bc18d6cf225d626d4a5d52dd98a30a19662ae14263c83b51744efce25e7ec5'], - }), - ('urca', '1.3-0', { - 'checksums': ['621cc82398e25b58b4a16edf000ed0a1484d9a0bc458f734e97b6f371cc76aaa'], - }), - ('fracdiff', 
'1.5-1', { - 'checksums': ['b8103b32a4ca3a59dda1624c07da08ecd144c7a91a747d1f4663e99421950eb6'], - }), - ('logistf', '1.23', { - 'checksums': ['5adb22a40569883395dc048c877f849dd08d07582a991f1b160f0338f0b13838'], - }), - ('akima', '0.6-2', { - 'checksums': ['61da3e556553eea6d1f8db7c92218254441da31e365bdef82dfe5da188cc97ce'], - }), - ('bitops', '1.0-6', { - 'checksums': ['9b731397b7166dd54941fb0d2eac6df60c7a483b2e790f7eb15b4d7b79c9d69c'], - }), - ('mixtools', '1.2.0', { - 'checksums': ['ef033ef13625209065d26767bf70d129972e6808927f755629f1d70a118b9023'], - }), - ('cluster', '2.1.0', { - 'checksums': ['eaf955bef8f616ea563351ec7f597c445aec43e65991ca975e382ef1fd70aa14'], - }), - ('gclus', '1.3.2', { - 'checksums': ['9cc61cdff206c11213e73afca3d570a7234250cf6044a9202c2589932278e0b3'], - }), - ('coda', '0.19-3', { - 'checksums': ['d3df1fc848bcf1af8fae13d61eeab60e99a3d4b4db384bec4326f909f502c5d6'], - }), - ('codetools', '0.2-16', { - 'checksums': ['f67a66175cb5d8882457d1e9b91ea2f16813d554fa74f80c1fd6e17cf1877501'], - }), - ('foreach', '1.5.0', { - 'checksums': ['1af9a713418c4cdeb49c4194f6482a7ee8ae4959b995a958a8a8a19ec8b60415'], - }), - ('doMC', '1.3.6', { - 'checksums': ['2977fc9e2dc54d85d45b4a36cd286dff72834fbc73f38b6ee45a6eb8557fc9b2'], - }), - ('DBI', '1.1.0', { - 'checksums': ['a96db7fa39a58f1ed34c6e78d8f5f7e4cf0882afb301323b5c6975d6729203e4'], - }), - ('gam', '1.16.1', { - 'checksums': ['80d04102c6152143e8ed364f91eb312e413f73b8fcab7cf15d677867a16e74b9'], - }), - ('gamlss.data', '5.1-4', { - 'checksums': ['0d3777d8c3cd76cef273aa6bde40a91688719be401195ed9bfd1e85bd7d5eeb5'], - }), - ('gamlss.dist', '5.1-6', { - 'checksums': ['3ff0e36dfd7ddea109410c539375a408af8f8f865dd8865555e41fb0402720dd'], - }), - ('gamlss', '5.1-6', { - 'checksums': ['b9c9a21343ed7777c239d8c5ad63b6f6efa0254bfcd6eaf66a74c319268b6799'], - }), - ('gamlss.tr', '5.1-0', { - 'checksums': ['f9e1c4935d8876bfc80dddc0a9bc2c82b4deeda9482df208297a84a638a4a9df'], - }), - ('hwriter', '1.3.2', { - 'checksums': ['6b3531d2e7a239be9d6e3a1aa3256b2745eb68aa0bdffd2076d36552d0d7322b'], - }), - ('KernSmooth', '2.23-17', { - 'checksums': ['2b3d73fe15db46dbc2f6e3b043baadb7633c46bfa4a66d9eea5aed633058e685'], - }), - ('xts', '0.12-0', { - 'checksums': ['df11e6dad7cf0a266702988fa6127aaf72437da743ca40e9abcd9e6b3d628c60'], - }), - ('curl', '4.3', { - 'checksums': ['7406d485bb50a6190e3ed201e3489063fd249b8b3b1b4f049167ac405a352edb'], - }), - ('TTR', '0.23-6', { - 'checksums': ['afc10a89d3a18f121ddf0f7256408eeb05cc64e18ee94e654bfa803e5415e265'], - }), - ('quantmod', '0.4.17', { - 'checksums': ['0aff56f276f8e347c56356060f7320913f0e417f1c5411c49f0865ca732044eb'], - }), - ('mvtnorm', '1.1-0', { - 'checksums': ['8112e12eb11f5db2ff145893f48426520e669be99b87889457dd2c4f2636cb5d'], - }), - ('pcaPP', '1.9-73', { - 'checksums': ['ca4566b0babfbe83ef9418283b08a12b3420dc362f93c6562f265df7926b53fc'], - }), - ('SQUAREM', '2020.2', { - 'checksums': ['6e3373bb5190ade222d676dae9f1aad32feddd50e97499fab7d66fd94752dac8'], - }), - ('lava', '1.6.7', { - 'checksums': ['63f7a8454cfc70739877812481a987deea33d4235f05234d0dd0ed8bd6eadf39'], - }), - ('prodlim', '2019.11.13', { - 'checksums': ['6809924f503a14681de84730489cdaf9240d7951c64f5b98ca37dc1ce7809b0f'], - }), - ('pscl', '1.5.5', { - 'checksums': ['054c9b88a991abdec3338688f58e81b6ba55f91edb988621864b24fd152fee6f'], - }), - ('memoise', '1.1.0', { - 'checksums': ['b276f9452a26aeb79e12dd7227fcc8712832781a42f92d70e86040da0573980c'], - }), - ('bit64', '0.9-7', { - 'checksums': 
['7b9aaa7f971198728c3629f9ba1a1b24d53db5c7e459498b0fdf86bbd3dff61f'], - }), - ('blob', '1.2.1', { - 'checksums': ['ef54bc7a9646c1b73f4d2f60c869b4f1940bc3505874175114297ad7772d8bea'], - }), - ('RSQLite', '2.2.0', { - 'checksums': ['000d126fda069cd97d1a9f9df16cd267ca76d8b96c290ca9b8c32d9e91d468d4'], - }), - ('data.table', '1.12.8', { - 'checksums': ['d3a75f3a355ff144cc20a476041617e21fcf2a9f79265fd9bbd4693f3671f9dc'], - }), - ('BatchJobs', '1.8', { - 'checksums': ['35cc2dae31994b1df982d11939509ce965e12578418c4fbb8cd7a422afd6e4ff'], - }), - ('sandwich', '2.5-1', { - 'checksums': ['dbef6f4d12b83e166f9a2508b7c732b04493641685d6758d29f3609e564166d6'], - }), - ('sfsmisc', '1.1-6', { - 'checksums': ['57b22cdd713e71e9235ff1ace8cdf73564bfdcee4b018f3d7cde6fb35493db11'], - }), - ('spatial', '7.3-12', { - 'checksums': ['7639039ee7407bd088e1b253376b2cb4fcdf4cc9124d6b48e4119d5cda872d63'], - }), - ('VGAM', '1.1-3', { - 'checksums': ['0c9ff51b9ee76d8b182a19b61f278970ad6d421c0206bfef40b7413b7acb94c3'], - }), - ('waveslim', '1.8.2', { - 'checksums': ['133c4f7a027282742fe99b583ca65f178fc7a3df2ce75cb4d60650f0a1dd7145'], - }), - ('xtable', '1.8-4', { - 'checksums': ['5abec0e8c27865ef0880f1d19c9f9ca7cc0fd24eadaa72bcd270c3fb4075fd1c'], - }), - ('profileModel', '0.6.0', { - 'checksums': ['a829ceec29c817d6d15947b818e28f9cf5a188a231b9b5d0a75018388887087b'], - }), - ('brglm', '0.6.2', { - 'checksums': ['c2af432a43ccf37e9de50317f770b9703a4c80b4ef79ec40aa8e7ec3987e3631'], - }), - ('deSolve', '1.28', { - 'checksums': ['4c55ef4cae841df91034382d277b483985af120240f87af587ff82177fdb5a49'], - }), - ('tseriesChaos', '0.1-13.1', { - 'checksums': ['23cb5fea56409a305e02a523ff8b7642ec383942d415c9cffdc92208dacfd961'], - }), - ('tseries', '0.10-47', { - 'checksums': ['202377df56806fe611c2e12c4d9732c71b71220726e2defa7e568d2b5b62fb7b'], - }), - ('fastICA', '1.2-2', { - 'checksums': ['32223593374102bf54c8fdca7b57231e4f4d0dd0be02d9f3500ad41b1996f1fe'], - }), - ('R.methodsS3', '1.8.0', { - 'checksums': ['e005f5ee21bfb6fbbf415de957a9ca0ed6e9f2800b95d98d76a9acb3c14185a5'], - }), - ('R.oo', '1.23.0', { - 'checksums': ['f5124ce3dbb0a62e8ef1bfce2de2d1dc2f776e8c48fd8cac358f7f5feb592ea1'], - }), - ('jsonlite', '1.6.1', { - 'checksums': ['74921dd249857a23afabc1ad1485a63a48828e57f240f0619deb04c60f883377'], - }), - ('sys', '3.3', { - 'checksums': ['a6217c2a7240ed68614006f392c6d062247dab8b9b0d498f95e947110df19b93'], - }), - ('askpass', '1.1', { - 'checksums': ['db40827d1bdbb90c0aa2846a2961d3bf9d76ad1b392302f9dd84cc2fd18c001f'], - }), - ('openssl', '1.4.1', { - 'checksums': ['f7fbecc75254fc43297a95a4338c674ab9ba2ec056b59e027d16d23122161fc6'], - }), - ('httr', '1.4.1', { - 'checksums': ['675c7e07bbe82c48284ee1ab929bb14a6e653abae2860d854dc41a3c028de156'], - }), - ('cgdsr', '1.3.0', { - 'checksums': ['4aa2a3564cee2449c3ff39ab2ad631deb165d4c78b8107e0ff77a9095340cc1f'], - }), - ('R.utils', '2.9.2', { - 'checksums': ['ac6b3b8e814fbb855c38fbdb89a4f0cf0ed65ce7fa308445bd74107fbc0d32cf'], - }), - ('R.matlab', '3.6.2', { - 'checksums': ['1ba338f470a24b7f6ef68cadbd04eb468ead4a689f263d2642408ad591b786bb'], - }), - ('gridExtra', '2.3', { - 'checksums': ['81b60ce6f237ec308555471ae0119158b115463df696d2eca9b177ded8988e3b'], - }), - ('gbm', '2.1.5', { - 'checksums': ['06fbde10639dfa886554379b40a7402d1f1236a9152eca517e97738895a4466f'], - }), - ('Formula', '1.2-3', { - 'checksums': ['1411349b20bd09611a9fd0ee6d15f780c758ad2b0e490e908facb49433823872'], - }), - ('acepack', '1.4.1', { - 'checksums': ['82750507926f02a696f6cc03693e8d4a5ee7e92500c8c15a16a9c12addcd28b9'], - }), 
- ('proto', '1.0.0', { - 'checksums': ['9294d9a3b2b680bb6fac17000bfc97453d77c87ef68cfd609b4c4eb6d11d04d1'], - }), - ('chron', '2.3-55', { - 'checksums': ['0f731fb9e79818cd95b5fa843cc233616a5f8e5dd39a1ae8048f5a1fd8d1eb25'], - }), - ('viridis', '0.5.1', { - 'checksums': ['ddf267515838c6eb092938133035cee62ab6a78760413bfc28b8256165701918'], - }), - ('yaml', '2.2.1', { - 'checksums': ['1115b7bc2a397fa724956eec916df5160c600c99a3be186d21558dd38d782783'], - }), - ('htmltools', '0.4.0', { - 'checksums': ['5b18552e1183b1b90b5cca8e7f95b57e8124c9d517b22aa64783b829513b811a'], - }), - ('htmlwidgets', '1.5.1', { - 'checksums': ['d42e59144552d9b4131f11ddd6169dfb9bd538c7996669a09acbdb400d18d781'], - }), - ('knitr', '1.28', { - 'checksums': ['05ee01da31d715bf24793efb3e4ef3bb3101ef1e1ab2d760c645fc5b9d40232a'], - }), - ('htmlTable', '1.13.3', { - 'checksums': ['d459c43675f6ee0a1ae8232ea8819b2a842e795a833b28127081fa344d09393d'], - }), - ('Hmisc', '4.4-0', { - 'checksums': ['f16ecf4c5ee2202d51f426282a54f8000ffa8b9747c3e910205f34f878556ec7'], - }), - ('fastcluster', '1.1.25', { - 'checksums': ['f3661def975802f3dd3cec5b2a1379f3707eacff945cf448e33aec0da1ed4205'], - }), - ('registry', '0.5-1', { - 'checksums': ['dfea36edb0a703ec57e111016789b47a1ba21d9c8ff30672555c81327a3372cc'], - }), - ('bibtex', '0.4.2.2', { - 'checksums': ['073887668f16568d9fafaa5862ed7d3d866f40cbc1a028371b038cdbbe9c1090'], - }), - ('pkgmaker', '0.31.1', { - 'checksums': ['1702b8e2fa9751fa67c3031468273eaa28358d27ba2df98a4fbb08df80500f64'], - }), - ('rngtools', '1.5', { - 'checksums': ['8274873b73f7acbe0ce007e62893bf4d369d2aab8768754a60da46b3f078f575'], - }), - ('doParallel', '1.0.15', { - 'checksums': ['71ad7ea69616468996aefdd8d02a4a234759a21ddde9ed1657e3c537145cd86e'], - }), - ('gridBase', '0.4-7', { - 'checksums': ['be8718d24cd10f6e323dce91b15fc40ed88bccaa26acf3192d5e38fe33e15f26'], - }), - ('irlba', '2.3.3', { - 'checksums': ['6ee233697bcd579813bd0af5e1f4e6dd1eea971e8919c748408130d970fef5c0'], - }), - ('igraph', '1.2.5', { - 'checksums': ['0cdd675b2e6a31f54bd5ba4530a26f00996eb310ceea93263c6fc4ba9e0fdf88'], - }), - ('GeneNet', '1.2.14', { - 'checksums': ['76f4d1a5954b3060d95017b0108b2f0936fdf38c15e5c1fd051cfc5c82ccb031'], - }), - ('ape', '5.3', { - 'checksums': ['08b0df134c523feb00a86896d1aa2a43f0f0dab20a53bc6b5d6268d867988b23'], - }), - ('RJSONIO', '1.3-1.4', { - 'checksums': ['54142c931e15eca278a02dad5734026bb49d960471eb085008af825352953190'], - }), - ('caTools', '1.18.0', { - 'checksums': ['0343698a41e8b516769af0433ac2e52a7df9be709b7f78c1825e88e1a37f3378'], - }), - ('gplots', '3.0.3', { - 'checksums': ['d776d3ee9e284085f6ec1b7717afcd5c4addad60d2f1f4f220cda788c8ac4643'], - }), - ('ROCR', '1.0-7', { - 'checksums': ['e7ef710f847e441a48b20fdc781dbc1377f5a060a5ee635234053f7a2a435ec9'], - }), - ('later', '1.0.0', { - 'checksums': ['277b9848ef2e5e1ac7257aefeb58f6b20cca17693460e7c4eee0477de456b287'], - }), - ('promises', '1.1.0', { - 'checksums': ['c8ea0f3e3256cf3010439b3a6111966db419c3dcff9a561e73caf8bd65f38006'], - }), - ('httpuv', '1.5.2', { - 'checksums': ['93b32be974e0f531a3cb343685165c0caadf30cfea07683f8d69302a34045d8d'], - }), - ('rjson', '0.2.20', { - 'checksums': ['3a287c1e5ee7c333ed8385913c0a307daf99335fbdf803e9dcca6e3d5adb3f6c'], - }), - ('sourcetools', '0.1.7', { - 'checksums': ['47984406efb3b3face133979ccbae9fefb7360b9a6ca1a1c11473681418ed2ca'], - }), - ('fastmap', '1.0.1', { - 'checksums': ['4778b05dfebd356f8df980dfeff3b973a72bca14898f870e5c40c1d84db9faec'], - }), - ('shiny', '1.4.0.2', { - 'checksums': 
['dca6ac83d03266a3d930273e7b821afa4a574f02ef89f963672972c2a2f5e064'], - }), - ('seqinr', '3.6-1', { - 'checksums': ['c44fc8922ef410da3c3b5ca117cdbec55ccb546c9e6d96c01ede44398dfa6048'], - }), - ('LearnBayes', '2.15.1', { - 'checksums': ['9b110858456523ca0b2a63f22013c4e1fbda6674b9d84dc1f4de8bffc5260532'], - }), - ('deldir', '0.1-25', { - 'checksums': ['f0a2f2eb511e8e99423a8f9b6ebc9073967d79629db4c86824eb0696d1a6af4d'], - }), - ('gmodels', '2.18.1', { - 'checksums': ['626140a34eb8c53dd0a06511a76c71bc61c48777fa76fcc5e6934c9c276a1369'], - }), - ('expm', '0.999-4', { - 'checksums': ['58d06427a08c9442462b00a5531e2575800be13ed450c5a1546261251e536096'], - }), - ('raster', '3.1-5', { - 'checksums': ['db6622d55bb9e5c4a8d8e59887a802b35fc07dcee946800453bc5e1901c01a04'], - }), - ('spData', '0.3.5', { - 'checksums': ['901e840ba42e945d51ea0dfe815fece44dd92a8e74a2356345ccbb2577908926'], - }), - ('units', '0.6-6', { - 'checksums': ['d0b6c76afb9aa5d7a0eaae05e6fc1bd2bb9d62d4c43e986b4782d6e5c2efa687'], - }), - ('classInt', '0.4-3', { - 'checksums': ['9ede7a2a7a6b6c114919a3315a884fb592e33b037a50a4fe45cbd4fe2fc434ac'], - }), - ('vegan', '2.5-6', { - 'checksums': ['b3c00aceb3db38101960515658e2b9ec1552439c3ed4e26e72989f18eccbc03c'], - }), - ('progress', '1.2.2', { - 'checksums': ['b4a4d8ed55db99394b036a29a0fb20b5dd2a91c211a1d651c52a1023cc58ff35'], - }), - ('rncl', '0.8.4', { - 'checksums': ['6b19d0dd9bb08ecf99766be5ad684bcd1894d1cd9291230bdd709dbd3396496b'], - }), - ('XML', '3.99-0.3', { - 'checksums': ['81b7a76308f3b7378dff525eff0180bba73b31117483a26cc3aa172d15c7f753'], - }), - ('tinytex', '0.22', { - 'checksums': ['6bbcbc907cad14bc0a583670bad1d9648d1f1cedd364354042aee83bb6302e69'], - }), - ('rmarkdown', '2.1', { - 'checksums': ['ef450e21206c454aa78eeca9023bbc78d1b2939e0b4bed9fdec9f2bf81ee455d'], - }), - ('reshape', '0.8.8', { - 'checksums': ['4d5597fde8511e8fe4e4d1fd7adfc7ab37ff41ac68c76a746f7487d7b106d168'], - }), - ('xml2', '1.3.2', { - 'checksums': ['df22f9e7e3189d8c9b8804eaf0105324fdac983cffe743552f6d76613600a4cf'], - }), - ('triebeard', '0.3.0', { - 'checksums': ['bf1dd6209cea1aab24e21a85375ca473ad11c2eff400d65c6202c0fb4ef91ec3'], - }), - ('urltools', '1.7.3', { - 'checksums': ['6020355c1b16a9e3956674e5dea9ac5c035c8eb3eb6bbdd841a2b5528cafa313'], - }), - ('httpcode', '0.3.0', { - 'checksums': ['593a030a4f94c3df8c15576837c17344701bac023ae108783d0f06c476062f76'], - }), - ('crul', '0.9.0', { - 'checksums': ['a7b42c69ca31648a419b93c618d32d0613f3ea053e45d584e84ef422ccf531c0'], - }), - ('bold', '0.9.0', { - 'checksums': ['45e844a83f4545a2f84887e36db83113da824a8673fa039f067a3bd7ee82ed5e'], - }), - ('rredlist', '0.6.0', { - 'checksums': ['bed33680f4e36f0f357d5785b631ae91232c8593a7517f1c0a4199d4e1e28332'], - }), - ('rentrez', '1.2.2', { - 'checksums': ['e5cb4265fd06d2ed0e11da3667ba79f7f2c8816005ba72cf5f53b8cf02dc193e'], - }), - ('rotl', '3.0.10', { - 'checksums': ['38b4679fe2d5407f7d0799d624ae8ea5d73ec0b6531b0e3d48246dea5575073a'], - }), - ('solrium', '1.1.4', { - 'checksums': ['5fccdb455746493c56e4df91f01ea9e89cdf0d67cfa5f958ca246b9207d20375'], - }), - ('ritis', '0.9.0', { - 'checksums': ['4abbe6c860fd3e465116573c9b2f119dbbd0046646844551523188ded63f0f6c'], - }), - ('worrms', '0.4.0', { - 'checksums': ['8480c56a4412662a383103fef68e73fcf14e94fcb878c25df8c6d5a8c0146059'], - }), - ('natserv', '0.4.0', { - 'checksums': ['ba7ef96290b4713e46197cc872d5400710086dc3668717d67995ee3de3d19c87'], - }), - ('WikipediR', '1.5.0', { - 'checksums': ['f8d0e6f04fb65f7ad9c1c068852a6a8b699ffe8d39edf1f3fa07d32d087e8ff0'], - }), - 
('WikidataR', '1.4.0', { - 'checksums': ['64b1d53d7023249b73a77a7146adc3a8957b7bf3d808ebd6734795e9f58f4b2a'], - }), - ('wikitaxa', '0.3.0', { - 'checksums': ['10dbabac6c56c1d0f33a66ff9b4f48b0bcb470711808a86863b48dc1140ec86c'], - }), - ('phangorn', '2.5.5', { - 'checksums': ['c58dc1ace26cb4358619a15da3ea4765dbdde1557acccc5103c85589a7571346'], - }), - ('taxize', '0.9.95', { - 'checksums': ['8a27d81678e60f67082d9b0b3cd104fe531ea2be2d9073a20cab016259228834'], - }), - ('uuid', '0.1-4', { - 'checksums': ['98e0249dda17434bfa209c2058e9911e576963d4599be9f7ea946e664f8ca93e'], - }), - ('RNeXML', '2.4.3', { - 'checksums': ['bf801c93da4d5a59c92d17c15c04072e1ba4f72c50461a1e1eda2d446109a925'], - }), - ('phylobase', '0.8.10', { - 'checksums': ['5a44380ff49bab333a56f6f96157324ade8afb4af0730e013194c4badb0bf94b'], - }), - ('magick', '2.3', { - 'checksums': ['a8412512a132a74ed88fbe64a0a415e9ba5437a1b8a664990638e10915274ba0'], - }), - ('animation', '2.6', { - 'checksums': ['90293638920ac436e7e4de76ebfd92e1643ccdb0259b62128f16dd0b13245b0a'], - }), - ('bigmemory.sri', '0.1.3', { - 'checksums': ['55403252d8bae9627476d1f553236ea5dc7aa6e54da6980526a6cdc66924e155'], - }), - ('bigmemory', '4.5.36', { - 'checksums': ['18c67fbe6344b2f8223456c4f19ceebcf6c1166255eab81311001fd67a45ef0e'], - }), - ('calibrate', '1.7.5', { - 'checksums': ['33f4f6874f0a979af3ce592ed1105e829d3df1fbf05c6e0cd3829a13b21d82e8'], - }), - ('clusterGeneration', '1.3.4', { - 'checksums': ['7c591ad95a8a9d7fb0e4d5d80dfd78f7d6a63cf7d11eb53dd3c98fdfb5b868aa'], - }), - ('dismo', '1.1-4', { - 'checksums': ['f2110f716cd9e4cca5fd2b22130c6954658aaf61361d2fe688ba22bbfdfa97c8'], - }), - ('extrafontdb', '1.0', { - 'checksums': ['faa1bafee5d4fbc24d03ed237f29f1179964ebac6e3a46ac25b0eceda020b684'], - }), - ('Rttf2pt1', '1.3.8', { - 'checksums': ['560646d4488bf70edd8f785a99e8208e7fd004014e29cb52b050fb55e7176e2c'], - }), - ('extrafont', '0.17', { - 'checksums': ['2f6d7d79a890424b56ddbdced361f8b9ddede5edd33e090b816b88a99315332d'], - }), - ('fields', '10.3', { - 'checksums': ['490bff3637edd6d42b578776648be031486fc38cdbe668fd46b07c2add3e698a'], - }), - ('shapefiles', '0.7', { - 'checksums': ['eeb18ea4165119519a978d4a2ba1ecbb47649deb96a7f617f5b3100d63b3f021'], - }), - ('fossil', '0.4.0', { - 'checksums': ['37c082fa15ebae89db99d6071b2bb2cad6a97a0405e9b4ef77f62a8f6ad274c1'], - }), - ('geiger', '2.0.6.4', { - 'checksums': ['8ddc12779b86b14b173a5c72a28c4e22784e4a7a48e6c806e48a097c2928af64'], - }), - ('shape', '1.4.4', { - 'checksums': ['f4cb1b7d7c84cf08d2fa97f712ea7eb53ed5fa16e5c7293b820bceabea984d41'], - }), - ('glmnet', '3.0-2', { - 'checksums': ['f48956a75af7e2be045198873fc9eb637a549af1db83dcf76cac3774bfb3762c'], - }), - ('crosstalk', '1.1.0.1', { - 'checksums': ['36a70b10bc11826e314c05f9579fd791b9ac3b3a2cfed4d4ca74ce1ad991300e'], - }), - ('miniUI', '0.1.1.1', { - 'checksums': ['452b41133289f630d8026507263744e385908ca025e9a7976925c1539816b0c0'], - }), - ('webshot', '0.5.2', { - 'checksums': ['f183dc970157075b51ac543550a7a48fa3428b9c6838abb72fe987c21982043f'], - }), - ('manipulateWidget', '0.10.1', { - 'checksums': ['9d621192121f6b516bc7f1a18305995bfb7838c6683ac701422afc03a50e27ee'], - }), - ('rgl', '0.100.54', { - 'checksums': ['17b7f8f135f526aba17dc516952f692daa7a7d6e787157fdff8dd5175113fad5'], - }), - ('Rtsne', '0.15', { - 'checksums': ['56376e4f0a382fad3d3d40e2cb0562224be5265b827622bcd235e8fc63df276c'], - }), - ('labdsv', '2.0-1', { - 'checksums': ['5a4d55e9be18222dc47e725008b450996448ab117d83e7caaa191c0f13fd3925'], - }), - ('stabs', '0.6-3', { - 'checksums': 
['e961ae21d45babc1162b6eeda874c4e3677fc286fd06f5427f071ad7a5064a9f'], - }), - ('modeltools', '0.2-23', { - 'checksums': ['6b3e8d5af1a039db5c178498dbf354ed1c5627a8cea9229726644053443210ef'], - }), - ('strucchange', '1.5-2', { - 'checksums': ['7d247c5ae6f5a63c80e478799d009c57fb8803943aa4286d05f71235cc1002f8'], - }), - ('TH.data', '1.0-10', { - 'checksums': ['618a1c67a30536d54b1e48ba3af46a6edcd6c2abef17935b5d4ba526a43aff55'], - }), - ('multcomp', '1.4-13', { - 'checksums': ['d30f0357b8307e7feb574d6d307e0ebc6bdca66b2cc172980fa5309685885fdb'], - }), - ('libcoin', '1.0-5', { - 'checksums': ['0a744164e00557d2f3e888d14cfd6108d17c14e983db620f74c7a5475be8a9b2'], - }), - ('matrixStats', '0.56.0', { - 'checksums': ['39e34a3dc480b9df05bb1a555eaef1dc1971a53f3ea6e01eb3a68bd1d3760f27'], - }), - ('coin', '1.3-1', { - 'checksums': ['5de2519a6e2b059bba9d74c58085cccaff1aaaa0454586ed164a108ebd1b2062'], - }), - ('party', '1.3-4', { - 'checksums': ['7689bd4fe7968ef1981147c5ad11237d630eddd5789a05c090339898eff71e7f'], - }), - ('inum', '1.0-1', { - 'checksums': ['3c2f94c13c03607e05817e4859595592068b55e810fed94e29bc181ad248a099'], - }), - ('partykit', '1.2-7', { - 'checksums': ['5c993c729c2975095eb27e6363eeb1c8a8ba22035f226f598af9d43a4ca312c1'], - }), - ('mboost', '2.9-2', { - 'checksums': ['34c6ba2051adc5ff429a594f7144bffcb7b129d5ff7c28a14cf21f38dbd554aa'], - }), - ('msm', '1.6.8', { - 'checksums': ['f3f18a9ea622a6d56f0f6d675b4890081d6def8b91a694c6764dac0d1cf262b4'], - }), - ('nor1mix', '1.3-0', { - 'checksums': ['9ce4ee92f889a4a4041b5ea1ff09396780785a9f12ac46f40647f74a37e327a0'], - }), - ('np', '0.60-10', { - 'checksums': ['a27b4bbca8b83a289c98920c1c8f5e9979ba9772086893252a4297dd2698081a'], - }), - ('polynom', '1.4-0', { - 'checksums': ['c5b788b26f7118a18d5d8e7ba93a0abf3efa6603fa48603c70ed63c038d3d4dd'], - }), - ('polspline', '1.1.17', { - 'checksums': ['d67b269d01105d4a6ea774737e921e66e065a859d1931ae38a70f88b6fb7ee30'], - }), - ('rms', '5.1-4', { - 'checksums': ['38f5844c4944a95b2adebea6bb1d163111270b8662399ea0349c45c0758076a6'], - }), - ('RWekajars', '3.9.3-2', { - 'checksums': ['16e6b019aab1646f89c5203f0d6fc1cb800129e5169b15aaef30fd6236f5da1a'], - }), - ('RWeka', '0.4-42', { - 'checksums': ['84e53028875d4603bd073c77709941d358152b8274977d45934fa89121b02104'], - }), - ('slam', '0.1-47', { - 'checksums': ['696356a68aa92059fa794444faa4c1775193c723a262a5f75de3c3c3047fcf89'], - }), - ('tm', '0.7-7', { - 'checksums': ['d0dbe41ff8414bdc2eee06a1b0d6db4567850135c4c6ff0a9c9ca8239166d15f'], - }), - ('TraMineR', '2.2-0', { - 'checksums': ['eeaeaf5151ec7a6b7179fd04dbdfb16479b4893e1547ccc29be74e444691d0f6'], - }), - ('chemometrics', '1.4.2', { - 'checksums': ['b705832fa167dc24b52b642f571ed1efd24c5f53ba60d02c7797986481b6186a'], - }), - ('FNN', '1.1.3', { - 'checksums': ['de763a25c9cfbd19d144586b9ed158135ec49cf7b812938954be54eb2dc59432'], - }), - ('ipred', '0.9-9', { - 'checksums': ['0da87a70730d5a60b97e46b2421088765e7d6a7cc2695757eba0f9d31d86416f'], - }), - ('miscTools', '0.6-26', { - 'checksums': ['be3c5a63ca12ce7ce4d43767a1815cd3dcf32664728ade251cfb03ea6f77fc9a'], - }), - ('maxLik', '1.3-8', { - 'checksums': ['33404d10bfe7746cab8227b880b50808a63909036daf6fedbac94c75ac68dfe5'], - }), - ('gbRd', '0.4-11', { - 'checksums': ['0251f6dd6ca987a74acc4765838b858f1edb08b71dbad9e563669b58783ea91b'], - }), - ('Rdpack', '0.11-1', { - 'checksums': ['58020f150be07209fd1fdd7f5e58c138863e850f4e4c1512d69250286e091e20'], - }), - ('mlogit', '1.0-3.1', { - 'checksums': ['e4b601d8f0d0bcd1c63468ab88aa305355d2811c60b038a5ba4b99245cf59b0c'], - }), - 
('getopt', '1.20.3', { - 'checksums': ['531f5fdfdcd6b96a73df2b39928418de342160ac1b0043861e9ea844f9fbf57f'], - }), - ('gsalib', '2.1', { - 'checksums': ['e1b23b986c18b89a94c58d9db45e552d1bce484300461803740dacdf7c937fcc'], - }), - ('optparse', '1.6.6', { - 'checksums': ['51779d497146e9354b1153713d939e81551e08948c2b00e4b117b1377c0b60d0'], - }), - ('labelled', '2.3.0', { - 'checksums': ['9f16f168436039d7881d535a9f15fb0dce752fd3a28bce89192718cdbd043a50'], - }), - ('questionr', '0.7.0', { - 'checksums': ['c4566880a1ca8f01faad396e20d907d913f4a252acaf83a0cb508a3738874cb3'], - }), - ('klaR', '0.6-15', { - 'checksums': ['5bfe5bc643f8a64b222317732c26e9f93be297cdc318a869f15cc9ab0d9e0fae'], - }), - ('neuRosim', '0.2-12', { - 'checksums': ['f4f718c7bea2f4b61a914023015f4c71312f8a180124dcbc2327b71b7be256c3'], - }), - ('locfit', '1.5-9.4', { - 'checksums': ['d9d3665c5f3d49f698fb4675daf40a0550601e86db3dc00f296413ceb1099ced'], - }), - ('GGally', '1.5.0', { - 'checksums': ['069261cd469e2d2c8c794b2956e69c356b471eccfc45a60c55e55dfd83185a20'], - }), - ('beanplot', '1.2', { - 'checksums': ['49da299139a47171c5b4ccdea79ffbbc152894e05d552e676f135147c0c9b372'], - }), - ('clValid', '0.6-6', { - 'checksums': ['c13ef1b6258e34ba53615b78f39dbe4d8ba47b976b3c24a3eedaecf5ffba19ed'], - }), - ('DiscriMiner', '0.1-29', { - 'checksums': ['5aab7671086ef9940e030324651976456f0e84dab35edb7048693ade885228c6'], - }), - ('ellipse', '0.4.1', { - 'checksums': ['1a9a9c52195b26c2b4d51ad159ab98aff7aa8ca25fdc6b2198818d1a0adb023d'], - }), - ('leaps', '3.1', { - 'checksums': ['3d7c3a102ce68433ecf167ece96a7ebb4207729e4defd0ac8fc00e7003f5c3b6'], - }), - ('pbkrtest', '0.4-8.6', { - 'checksums': ['5f863b167968d97ea504f3fffabc1b4c922e244d4e194e013229960d3384bd68'], - }), - ('carData', '3.0-3', { - 'checksums': ['986b84bdd289159eead8b050ea82600a4f77bf0bbe0293a7c7b25d607ff7e231'], - }), - ('maptools', '0.9-9', { - 'checksums': ['69ba3b2cd50260f78fb6c25cf0557b4a0d31498d6a4f4ff00e466334fba4946c'], - }), - ('zip', '2.0.4', { - 'checksums': ['ab5dd0c63bd30b478d0f878735e7baf36e2e76e4d12d2b4b8eddd03b665502b0'], - }), - ('openxlsx', '4.1.4', { - 'checksums': ['07a38b21f6ce6e92d58d7a51ea9f4b5fd77db49b019a18ba9ecea69878a39dd7'], - }), - ('rematch', '1.0.1', { - 'checksums': ['a409dec978cd02914cdddfedc974d9b45bd2975a124d8870d52cfd7d37d47578'], - }), - ('cellranger', '1.1.0', { - 'checksums': ['5d38f288c752bbb9cea6ff830b8388bdd65a8571fd82d8d96064586bd588cf99'], - }), - ('readxl', '1.3.1', { - 'checksums': ['24b441713e2f46a3e7c6813230ad6ea4d4ddf7e0816ad76614f33094fbaaaa96'], - }), - ('rio', '0.5.16', { - 'checksums': ['d3eb8d5a11e0a3d26169bb9d08f834a51a6516a349854250629072d59c29d465'], - }), - ('car', '3.0-7', { - 'checksums': ['ad98a2f0f47105285d6677b398fc1b169cc20458e799e05dae47c84068984e87'], - }), - ('flashClust', '1.01-2', { - 'checksums': ['48a7849bb86530465ff3fbfac1c273f0df4b846e67d5eee87187d250c8bf9450'], - }), - ('ggrepel', '0.8.2', { - 'checksums': ['0d01bfc005e9af5e6b57e2a677781424387f38ec208818295eb87dd5867551e1'], - }), - ('FactoMineR', '2.3', { - 'checksums': ['c64f30a3839a375395a3b7d8a4131e1df74aea31da6348d7a506eaa9da70af51'], - }), - ('flexclust', '1.4-0', { - 'checksums': ['82fe445075a795c724644864c7ee803c5dd332a89ea9e6ccf7cd1ae2d1ecfc74'], - }), - ('flexmix', '2.3-15', { - 'checksums': ['ba444c0bfe33ab87d440ab590c06b03605710acd75811c1622253171bb123f43'], - }), - ('prabclus', '2.3-2', { - 'checksums': ['f421bcbcb557281e0de4a06b15f9a496adb5c640e883c0f7bb12051efc69e441'], - }), - ('diptest', '0.75-7', { - 'checksums': 
['462900100ca598ef21dbe566bf1ab2ce7c49cdeab6b7a600a50489b05f61b61b'], - }), - ('trimcluster', '0.1-5', { - 'checksums': ['9239f20e4a06ac2fa89e5d5d89b23a45c8c534a7264d89bede8a35d43dda518b'], - }), - ('fpc', '2.2-5', { - 'checksums': ['45855d446593b93ea0873d701a6c7c6b47335a67ab34066e4cc8ae1d3f24a080'], - }), - ('BiasedUrn', '1.07', { - 'checksums': ['2377c2e59d68e758a566452d7e07e88663ae61a182b9ee455d8b4269dda3228e'], - }), - ('TeachingDemos', '2.12', { - 'checksums': ['3e75405ce1affa406d6df85e06f96381412bc7a2810b25d8c81bfe64c4698644'], - }), - ('kohonen', '3.0.10', { - 'checksums': ['996956ea46a827c9f214e4f940a19304a0ff35bda707d4d7312f80d3479067b2'], - }), - ('base64', '2.0', { - 'checksums': ['8e259c2b12446197d1152b83a81bab84ccb5a5b77021a9b5645dd4c63c804bd1'], - }), - ('doRNG', '1.8.2', { - 'checksums': ['33e9d45b91b0fde2e35e911b9758d0c376049121a98a1e4c73a1edfcff11cec9'], - }), - ('nleqslv', '3.3.2', { - 'checksums': ['f54956cf67f9970bb3c6803684c84a27ac78165055745e444efc45cfecb63fed'], - }), - ('Deriv', '4.0', { - 'checksums': ['76788764177b24dc27f4e27046fa563ad97014e0d53e14a880ebff2f9177b40e'], - }), - ('RGCCA', '2.1.2', { - 'checksums': ['20f341fca8f616c556699790814debdf2ac7aa4dd9ace2071100c66af1549d7d'], - }), - ('pheatmap', '1.0.12', { - 'checksums': ['579d96ee0417203b85417780eca921969cda3acc210c859bf9dfeff11539b0c1'], - }), - ('pvclust', '2.2-0', { - 'checksums': ['7892853bacd413b5a921006429641ad308a344ca171b3081c15e4c522a8b0201'], - }), - ('RCircos', '1.2.1', { - 'checksums': ['3b9489ab05ea83ead99ca6e4a1e6830467a2064779834aff1317b42bd41bb8fd'], - }), - ('lambda.r', '1.2.4', { - 'checksums': ['d252fee39065326c6d9f45ad798076522cec05e73b8905c1b30f95a61f7801d6'], - }), - ('futile.options', '1.0.1', { - 'checksums': ['7a9cc974e09598077b242a1069f7fbf4fa7f85ffe25067f6c4c32314ef532570'], - }), - ('futile.logger', '1.4.3', { - 'checksums': ['5e8b32d65f77a86d17d90fd8690fc085aa0612df8018e4d6d6c1a60fa65776e4'], - }), - ('VennDiagram', '1.6.20', { - 'checksums': ['e51cb3fff23c6ec8191966490bf875a7415f8725d4054bae881a25febb9281c5'], - }), - ('xlsxjars', '0.6.1', { - 'checksums': ['37c1517f95f8bca6e3514429394d2457b9e62383305eba288416fb53ab2e6ae6'], - }), - ('xlsx', '0.6.3', { - 'checksums': ['e5a9b8ead1b4502e7a1143a1d842d4994dd92f333a95a00d81a27ef62c5e035e'], - }), - ('uroot', '2.1-0', { - 'checksums': ['3c02a9dadd22aa67a59e99007ab6f576dc428859fa746d3a8f3ffa2bb43d18c2'], - }), - ('forecast', '8.12', { - 'checksums': ['eb607fd584d66abf39b14c00d50111304e892a6e0778c9a8354195c6c92f92f9'], - }), - ('fma', '2.4', { - 'checksums': ['69a94c3bd464176a80232d49fcd04d478d4dd59f9bf128d6a9f46e49612d27f4'], - }), - ('expsmooth', '2.3', { - 'checksums': ['ac7da36347f983d6ec71715daefd2797fe2fc505c019f4965cff9f77ce79982a'], - }), - ('fpp', '0.5', { - 'checksums': ['9c87dd8591b8a87327cae7a03fd362a5492495a96609e5845ccbeefb96e916cb'], - }), - ('tensor', '1.5', { - 'checksums': ['e1dec23e3913a82e2c79e76313911db9050fb82711a0da227f94fc6df2d3aea6'], - }), - ('polyclip', '1.10-0', { - 'checksums': ['74dabc0dfe5a527114f0bb8f3d22f5d1ae694e6ea9345912909bae885525d34b'], - }), - ('goftest', '1.2-2', { - 'checksums': ['e497992666b002b6c6bed73bf05047ad7aa69eb58898da0ad8f1f5b2219e7647'], - }), - ('spatstat.utils', '1.17-0', { - 'checksums': ['39cd683ed7f41d8adc9e28af073d91b244aa1cf5ad966dfbb396ee3ee79f0922'], - }), - ('spatstat.data', '1.4-3', { - 'checksums': ['8955b6ac40cc7d0d89e02334bb46f4c223ff0755e5818f132fee753e77918ea2'], - }), - ('spatstat', '1.63-3', { - 'checksums': 
['07b4a1a1b37c91944f31779dd789598f4a5ad047a3de3e9ec2ca99b9e9565528'], - }), - ('pracma', '2.2.9', { - 'checksums': ['0cea0ff5e88643df121e07b9aebfe57084c61e11801680039752f371fe87bf1e'], - }), - ('RCurl', '1.98-1.2', { - 'checksums': ['5d74a0cdc3c5684b0348b959f67039e3c2a5da2bbb6176f6800a94124895a7a8'], - }), - ('bio3d', '2.4-1', { - 'checksums': ['679fbd87fe9fb82a65427d281d3b68906509e411270cd87d2deb95d404333c1f'], - }), - ('AUC', '0.3.0', { - 'checksums': ['e705f2c63d336249d19187f3401120d738d42d323fce905f3e157c2c56643766'], - }), - ('interpretR', '0.2.4', { - 'checksums': ['4c08a6dffd6fd5764f27812f3a085c53e6a21d59ae82d903c9c0da93fd1dd059'], - }), - ('cvAUC', '1.1.0', { - 'checksums': ['c4d8ed53b93869650aa2f666cf6d1076980cbfea7fa41f0b8227595be849738d'], - }), - ('SuperLearner', '2.0-26', { - 'checksums': ['4462922c8daae2773f79ecdea7ca3cc4ea51bfd101c5e6c1ad22f9190e746081'], - }), - ('mediation', '4.5.0', { - 'checksums': ['210206618787c395a67689be268283df044deec7199d9860ed95218ef1e60845'], - }), - ('ModelMetrics', '1.2.2.2', { - 'checksums': ['5e06f1926aebca5654e1329c66ef19b04058376b2277ebb16e3bf8c208d73457'], - }), - ('CVST', '0.2-2', { - 'checksums': ['854b8c983427ecf9f2f7798c4fd1c1d06762b5b0bcb1045502baadece6f78316'], - }), - ('DRR', '0.0.4', { - 'checksums': ['93e365a4907e301ae01f7d943e6bdcda71ef23c51a4759ba3c94bcf842d4e0f8'], - }), - ('dimRed', '0.2.3', { - 'checksums': ['e6e56e3f6999ebdc326e64ead5269f3aaf61dd587beefafb7536ac3890370d84'], - }), - ('lubridate', '1.7.8', { - 'checksums': ['3da19922fc373e113ecc58c4984955ba26da703edc9c991bd444b7077d4b553c'], - }), - ('ddalpha', '1.3.11', { - 'checksums': ['c30b4a3a9549cb4dc0a8e51e06f5b6e4c457c5326acc8f4680968c920f59b6e9'], - }), - ('gower', '0.2.1', { - 'checksums': ['af3fbe91cf818c0841b2c0ec4ddf282c182a588031228c8d88f7291b2cdff100'], - }), - ('RcppRoll', '0.3.0', { - 'checksums': ['cbff2096443a8a38a6f1dabf8c90b9e14a43d2196b412b5bfe5390393f743f6b'], - }), - ('recipes', '0.1.10', { - 'checksums': ['4f345e31568e41b3efb6c6333e8ccab032e293dbd0256299d922fe6c9532c985'], - }), - ('caret', '6.0-86', { - 'checksums': ['da4a1c7c3fbf645c5b02871e563a77404622b83623f0d1c5dc1425de7aa4ce37'], - }), - ('adabag', '4.2', { - 'checksums': ['47019eb8cefc8372996fbb2642f64d4a91d7cedc192690a8d8be6e7e03cd3c81'], - }), - ('parallelMap', '1.5.0', { - 'checksums': ['4afa727f4786279718cc799e45e91859a46f5cbc1ee652b0f47ae3b9f9d45e4e'], - }), - ('ParamHelpers', '1.14', { - 'checksums': ['b17652d0a69de3241a69f20be4ad1bfe02c413328a17f3c1ac7b73886a6ba2eb'], - }), - ('ggvis', '0.4.5', { - 'checksums': ['82373c3565c299279f6849f798cc39127b2b3f7ff2deee1946528474824b3124'], - }), - ('mlr', '2.17.1', { - 'checksums': ['0b71b9d00c627647cf5fc1f456d4445f025c90be2f974e05ccdb84e25ba1923b'], - }), - ('unbalanced', '2.0', { - 'checksums': ['9be32b1ce9d972f1abfff2fbe18f5bb5ba9c3f4fb1282063dc410b82ad4d1ea2'], - }), - ('RSNNS', '0.4-12', { - 'checksums': ['b18dfeda71573bc92c6888af72da407651bff7571967965fd3008f0d331743b9'], - }), - ('abc.data', '1.0', { - 'checksums': ['b242f43c3d05de2e8962d25181c6b1bb6ca1852d4838868ae6241ca890b161af'], - }), - ('abc', '2.1', { - 'checksums': ['0bd2dcd4ee1915448d325fb5e66bee68e0497cbd91ef67a11b400b2fbe52ff59'], - }), - ('lhs', '1.0.2', { - 'checksums': ['e2945192740fb088b210786006b311d3d4e7da967733a1998380d597320c1158'], - }), - ('tensorA', '0.36.1', { - 'checksums': ['c7ffe12b99867675b5e9c9f31798f9521f14305c9d9f9485b171bcbd8697d09c'], - }), - ('EasyABC', '1.5', { - 'checksums': ['1dd7b1383a7c891cafb34d9cec65d92f1511a336cff1b219e63c0aa791371b9f'], - }), - 
('whisker', '0.4', { - 'checksums': ['7a86595be4f1029ec5d7152472d11b16175737e2777134e296ae97341bf8fba8'], - }), - ('commonmark', '1.7', { - 'checksums': ['d14a767a3ea9778d6165f44f980dd257423ca6043926e3cd8f664f7171f89108'], - }), - ('roxygen2', '7.1.0', { - 'checksums': ['7e9b36f6e7c01a5c8c4747340b3d0c064ce2e48c93fcfbfe45139854fae74103'], - }), - ('git2r', '0.26.1', { - 'checksums': ['13d609286a0af4ef75ba76f2c2f856593603b8014e311b88896243a50b417435'], - }), - ('rversions', '2.0.1', { - 'checksums': ['51ec1f64e7d628e88d716a020d5d521eba71d472e3c9ae7b694428ef6dd786c5'], - }), - ('xopen', '1.0.0', { - 'checksums': ['e207603844d69c226142be95281ba2f4a056b9d8cbfae7791ba60535637b3bef'], - }), - ('sessioninfo', '1.1.1', { - 'checksums': ['166b04678448a7decd50f24afabe5e2ad613e3c55b180ef6e8dd7a870a1dae48'], - }), - ('rcmdcheck', '1.3.3', { - 'checksums': ['1ab679eb1976d74cd3be5bcad0af7fcc673dbdfd4406bbce32591c8fddfb93b4'], - }), - ('remotes', '2.1.1', { - 'checksums': ['4e590746fce618094089372b185e1ea234b3337b23c44c44118e942d0fb5118b'], - }), - ('fs', '1.4.1', { - 'checksums': ['ae9103dff26ca56a34901408bd650a2949f491b2a0886c686a51a179d38b7a4e'], - }), - ('clisymbols', '1.2.0', { - 'checksums': ['0649f2ce39541820daee3ed408d765eddf83db5db639b493561f4e5fbf88efe0'], - }), - ('ini', '0.3.1', { - 'checksums': ['7b191a54019c8c52d6c2211c14878c95564154ec4865f57007953742868cd813'], - }), - ('gh', '1.1.0', { - 'checksums': ['de9faf383c3fe5e87a75391d82cf71b1331b3c80cd00c4203146a303825d89ad'], - }), - ('rematch2', '2.1.1', { - 'checksums': ['d0423a418e8b46ac3a4819af7a7d19c39ca7c8c862c1e9a1c1294aa19152518f'], - }), - ('usethis', '1.6.1', { - 'checksums': ['60339059a97ed07dea7f8908b828b5bb42e0fd0b471165c061bc9660b0d59d6f'], - }), - ('DT', '0.13', { - 'checksums': ['79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5'], - }), - ('rex', '1.2.0', { - 'checksums': ['06b491f1469078862e40543fd74e1d38b2e0fb61fdf01c8083add4b11ac2eb54'], - }), - ('covr', '3.5.0', { - 'checksums': ['cb919912018130164a40803ac573a37dde2186678c058c03c6303d79604979df'], - }), - ('devtools', '2.3.0', { - 'checksums': ['4fc375c171335c67bd71df4e0b1b3dff2ae3aa17b3e0566b790ba0808b39dcd0'], - }), - ('Rook', '1.1-1', { - 'checksums': ['00f4ecfa4c5c57018acbb749080c07154549a6ecaa8d4130dd9de79427504903'], - }), - ('Cairo', '1.5-12', { - 'checksums': ['4e08eafb8c44045d16674ee5ae659f182ffe13ca86076fb077832947aa4a620b'], - }), - ('RMTstat', '0.3', { - 'checksums': ['81eb4c5434d04cb66c749a434c33ceb1c07d92ba79765d4e9233c13a092ec2da'], - }), - ('Lmoments', '1.3-1', { - 'checksums': ['7c9d489a08f93fa5877e2f233ab9732e0d1b2761596b3f6ac91f2295e41a865d'], - }), - ('distillery', '1.0-7', { - 'checksums': ['898833ceceed5291b4a02bf62c6fa5b78dd7837f9cc5a42b87a08672c7dae270'], - }), - ('extRemes', '2.0-11', { - 'checksums': ['75fbdeef677c81cf5661b8df3df4090c55f53e9bb96bb138b498eb0fbbf5af42'], - }), - ('tkrplot', '0.0-24', { - 'checksums': ['2873630a37d7ae1e09a5803d9a89ca0494edd83526c7b1860d9246543722f311'], - }), - ('misc3d', '0.8-4', { - 'checksums': ['75de3d2237f67f9e58a36e80a6bbf7e796d43eb46789f2dd1311270007bf5f62'], - }), - ('multicool', '0.1-11', { - 'checksums': ['1c907e64af2ac39facdf431a5691e69649f64af1f50e198ae39da5bf30026476'], - }), - ('plot3D', '1.3', { - 'checksums': ['b9e4ec2789e34ad249318900e186868650e1a33466b385cb492a45466db3dfc9'], - }), - ('plot3Drgl', '1.0.1', { - 'checksums': ['466d428d25c066c9c96d892f24da930513d42b1bdf76d3b53628c3ba13c3e48a'], - }), - ('OceanView', '1.0.5', { - 'checksums': 
['c16e1bed97f4ede46dc017fdd6bd7575d925b57bd2601317bd3ad2357609f885'], - }), - ('ks', '1.11.7', { - 'checksums': ['6a6d9c2366e85a4c6af39b798f3798d20a42615ddfcebcedf6cf56087cdfd2b8'], - }), - ('logcondens', '2.1.5', { - 'checksums': ['72e61abc1f3eb28830266fbe5b0da0999eb5520586000a3024e7c26be93c02eb'], - }), - ('Iso', '0.0-18', { - 'checksums': ['2d7e8c4452653364ee086d95cea620c50378e30acfcff129b7261e1756a99504'], - }), - ('penalized', '0.9-51', { - 'checksums': ['eaa80dca99981fb9eb576261f30046cfe492d014cc2bf286c447b03a92e299fd'], - }), - ('clusterRepro', '0.9', { - 'checksums': ['940d84529ff429b315cf4ad25700f93e1156ccacee7b6c38e4bdfbe2d4c6f868'], - }), - ('randomForestSRC', '2.9.3', { - 'checksums': ['bc47bef9e5afade8fdf56e08ae0ad320e424dfa5b11a32cd2d166c9988dc2e16'], - }), - ('sm', '2.2-5.6', { - 'checksums': ['b890cd7ebe8ed711ab4a3792c204c4ecbe9e6ca1fd5bbc3925eba5833a839c30'], - }), - ('pbivnorm', '0.6.0', { - 'checksums': ['07c37d507cb8f8d2d9ae51a9a6d44dfbebd8a53e93c242c4378eaddfb1cc5f16'], - }), - ('lavaan', '0.6-5', { - 'checksums': ['feeb6e1b419aa1d54fd5af1d67260b5d13ff251c19de8136a4df565305d47b12'], - }), - ('matrixcalc', '1.0-3', { - 'checksums': ['17e6caeeecd596b850a6caaa257984398de9ec5d2b41ce83c428f112614b9cb0'], - }), - ('arm', '1.11-1', { - 'checksums': ['7b82dbe8c5141546d11b0af656a6addda4c07f06fc165d01c7c1e39540b55444'], - }), - ('mi', '1.0', { - 'checksums': ['34f44353101e8c3cb6bf59c5f4ff5b2391d884dcbb9d23066a11ee756b9987c0'], - }), - ('visNetwork', '2.0.9', { - 'checksums': ['5e0b3dc3a91e66e0a359433f03cc856d04b981b0f9ad228d8fa9c96b7fcaa420'], - }), - ('servr', '0.16', { - 'checksums': ['cc950bedbd52f2d93c54157dc5b261113be6baee2d9e90e99a8de048c09fda80'], - }), - ('rgexf', '0.16.0', { - 'checksums': ['2a671df9ac70cfefd4092754317cb28e32a33df345b80e1975bf838e838245ee'], - }), - ('influenceR', '0.1.0', { - 'checksums': ['4fc9324179bd8896875fc0e879a8a96b9ef2a6cf42a296c3b7b4d9098519e98a'], - }), - ('downloader', '0.4', { - 'checksums': ['1890e75b028775154023f2135cafb3e3eed0fe908138ab4f7eff1fc1b47dafab'], - }), - ('DiagrammeR', '1.0.5', { - 'checksums': ['0877af707925b03c58a7e00cd84eb4e9906b551a61d86130ef4165477654e334'], - }), - ('sem', '3.1-9', { - 'checksums': ['4a33780202506543da85877cd2813250114420d6ec5e75457bc67477cd332cb9'], - }), - ('network', '1.16.0', { - 'checksums': ['a24f51457439c7186ffa1fe53719742c501929ac1a354e458754a83f280fce36'], - }), - ('statnet.common', '4.3.0', { - 'checksums': ['834a3359eac967df0420eee416ae4983e3b502a3de56bb24f494a7ca4104e959'], - }), - ('sna', '2.5', { - 'checksums': ['13b508cacb0bf1e79b55d5c8f7e9ada3b173468d4d6d5f1dc606990ac03071c8'], - }), - ('glasso', '1.11', { - 'checksums': ['4c37844b26f55985184a734e16b8fe880b192e3d2763614b0ab3f99b4530e30a'], - }), - ('huge', '1.3.4.1', { - 'checksums': ['78ef9eae464d52c5247998b9514a81b178419b857b1a6c00d885e3ae6c03a886'], - }), - ('d3Network', '0.5.2.1', { - 'checksums': ['5c798dc0c87c6d574abb7c1f1903346e6b0fec8adfd1df7aef5e4f9e7e3a09be'], - }), - ('BDgraph', '2.62', { - 'checksums': ['7e5de4406f4a7873bf948852291d2851a2ab312288467687dd5c0392b2723bac'], - }), - ('pbapply', '1.4-2', { - 'checksums': ['ac19f209f36f4fa3d0f5b14b6cc5b0c279996fb9d3e86c848c0f6d03c025b3f6'], - }), - ('graphlayouts', '0.7.0', { - 'checksums': ['20464b60376d9f8d522eec6a7495054b1715e4919f10e9a049868d8866398c9e'], - }), - ('tweenr', '1.0.1', { - 'checksums': ['efd68162cd6d5a4f6d833dbf785a2bbce1cb7b9f90ba3fb060931a4bd705096b'], - }), - ('ggforce', '0.3.1', { - 'checksums': 
['a05271da9b226c12ae5fe6bc6eddb9ad7bfe19e1737e2bfcd6d7a89631332211'], - }), - ('tidygraph', '1.1.2', { - 'checksums': ['5642001d4cccb122d66481b7c61a06c724c02007cbd356ee61cb29726a56fafe'], - }), - ('ggraph', '2.0.2', { - 'checksums': ['80caab7a38f2548a9fcd1ff3655a6bdbcb776fe662e3d93c17798bf2a04078b2'], - }), - ('qgraph', '1.6.5', { - 'checksums': ['2295ccca41f84cba34ad0e6c1b31af8bde79bda7373754c255e0ee9e63d29e5f'], - }), - ('HWxtest', '1.1.9', { - 'checksums': ['a37309bed4a99212ca104561239d834088217e6c5e5e136ff022544c706f25e6'], - }), - ('diveRsity', '1.9.90', { - 'checksums': ['b8f49cdbfbd82805206ad293fcb2dad65b962fb5523059a3e3aecaedf5c0ee86'], - }), - ('doSNOW', '1.0.18', { - 'checksums': ['70e7bd82186e477e3d1610676d4c6a75258ac08f104ecf0dcc971550ca174766'], - }), - ('geepack', '1.3-1', { - 'checksums': ['823153ca28e1a8bd8a45de778279480c1c35e063d62c8955b6cea1602f28d6df'], - }), - ('biom', '0.3.12', { - 'checksums': ['4ad17f7811c7346dc4923bd6596a007c177eebb1944a9f46e5674afcc5fdd5a1'], - }), - ('pim', '2.0.2', { - 'checksums': ['1195dbdbd67348dfef4b6fc34fcec643da685ebe58d34bbe049ab121aca9944f'], - }), - ('minpack.lm', '1.2-1', { - 'checksums': ['14cb7dba3ef2b46da0479b46d46c76198e129a31f6157cd8b37f178adb15d5a3'], - }), - ('rootSolve', '1.8.2.1', { - 'checksums': ['488451182663197ae4513e46e24f72cadb2297d35a58a3007a0dbf1bf0833031'], - }), - ('diagram', '1.6.4', { - 'checksums': ['7c2bc5d5d634c3b8ca7fea79fb463e412962d88f47a77a74c811cc62f375ce38'], - }), - ('FME', '1.3.6.1', { - 'checksums': ['ae0c69f75614e2ef9f2096c205c7f8eb90485c6311213762c1416ece4036be18'], - }), - ('bmp', '0.3', { - 'checksums': ['bdf790249b932e80bc3a188a288fef079d218856cf64ffb88428d915423ea649'], - }), - ('tiff', '0.1-5', { - 'checksums': ['9514e6a9926fcddc29ce1dd12b1072ad8265900373f738de687ef4a1f9124e2b'], - }), - ('readbitmap', '0.1.5', { - 'checksums': ['737d7d585eb33de2c200da64d16781e3c9522400fe2af352e1460c6a402a0291'], - }), - ('imager', '0.42.1', { - 'checksums': ['cb9c0f8dbf1383951bf96f5aeded1e774c26135a0117279de7e728cb6822eab4'], - }), - ('signal', '0.7-6', { - 'checksums': ['6b60277b07cf0167f8272059b128cc82f27a9bab1fd33d74c2a9e1f2abca5def'], - }), - ('tuneR', '1.3.3', { - 'checksums': ['bdc3c2017b162d2ba0a249e80361a4f47202e763c21aecfc57380a482a3a692b'], - }), - ('pastecs', '1.3.21', { - 'checksums': ['8c1ef2affe88627f0b23295aa5edb758b8fd6089ef09f60f37c46445128b8d7c'], - }), - ('audio', '0.1-7', { - 'checksums': ['52e0397a45325aa9586ec68b94ab9e505bdefaf2a588d634fcb57a6a11659c74'], - }), - ('fftw', '1.0-6', { - 'checksums': ['397ef5ec354b919884f74fba4202bfc13ad11a70b16285c41677aad1d3b170ce'], - }), - ('seewave', '2.1.5', { - 'checksums': ['718b1fb1c289f92be50de099da36d20380d113cb1577569333fca6195f71e8e1'], - }), - ('gsw', '1.0-5', { - 'checksums': ['eb468918ee91e429b47fbcac43269eca627b7f64b61520de5bbe8fa223e96453'], - }), - ('oce', '1.2-0', { - 'checksums': ['99072f2b20ad471b5a2afeb4d0690cad57cc770d60769a5cb20d001511439aa2'], - }), - ('ineq', '0.2-13', { - 'checksums': ['e0876403f59a3dfc2ea7ffc0d965416e1ecfdecf154e5856e5f54800b3efda25'], - }), - ('soundecology', '1.3.3', { - 'checksums': ['276164d5eb92c78726c647be16232d2443acbf7061371ddde2672b4fdb7a069a'], - }), - ('memuse', '4.1-0', { - 'checksums': ['58d6d1ca5d6bd481f4ed299eff6a9d5660eb0f8db1abe54c49e144093cba72ad'], - }), - ('pinfsc50', '1.1.0', { - 'checksums': ['b6b9b6365a3f408533264d7ec820494f57eccaf362553e8478a46a8e5b474aba'], - }), - ('vcfR', '1.10.0', { - 'checksums': ['9e19c8b23c981b61320aa275821f9accae8738bca775175b1201fcc30479ae8d'], - }), - ('glmmML', 
'1.1.0', { - 'checksums': ['34f088a73ccf6092908502a5bdaaf8209e9134d38abbbd7c4dd559832e653188'], - }), - ('cowplot', '1.0.0', { - 'checksums': ['70f9a7c46d10f409d1599f1afc9fd3c947051cf2b430f01d903c64ef1e6c98a5'], - }), - ('tsne', '0.1-3', { - 'checksums': ['66fdf5d73e69594af529a9c4f261d972872b9b7bffd19f85c1adcd66afd80c69'], - }), - ('sn', '1.6-1', { - 'checksums': ['80071625131256147f94a1a35b6f0cabd6de8b225f16860e398b6a8ca688d96a'], - }), - ('tclust', '1.4-1', { - 'checksums': ['4b0be612c8ecd7b4eb19a44ab6ac8f5d40515600ae1144c55989b6b41335ad9e'], - }), - ('ranger', '0.12.1', { - 'checksums': ['fc308e0ac06718272799928e1a19612de16b05bde481d8f38e11a101df5425ef'], - }), - ('hexbin', '1.28.1', { - 'checksums': ['42d092c709ebc84b18df8121beb6bd1d8a3f6f357afd5c3490757c4c4795c6e7'], - }), - ('pryr', '0.1.4', { - 'checksums': ['d39834316504c49ecd4936cbbcaf3ee3dae6ded287af42475bf38c9e682f721b'], - }), - ('moments', '0.14', { - 'checksums': ['2a3b81e60dafdd092d2bdd3513d7038855ca7d113dc71df1229f7518382a3e39'], - }), - ('laeken', '0.5.1', { - 'checksums': ['1aa94a1768969eb999f7a41212af2d8b2943b43a68a92f99c9f77929e19439a5'], - }), - ('VIM', '5.1.1', { - 'checksums': ['ca1430103b6bd658e318bbbbd9c25763d11d0b3f52706b1a7ea7fafd408e4270'], - }), - ('proxy', '0.4-24', { - 'checksums': ['8cff9bf036475941a7c44ba9bb5e2f6d4777d49ab3daaeb52d23f4b2af6d9c7c'], - }), - ('smoother', '1.1', { - 'checksums': ['91b55b82f805cfa1deedacc0a4e844a2132aa59df593f3b05676954cf70a195b'], - }), - ('dynamicTreeCut', '1.63-1', { - 'checksums': ['831307f64eddd68dcf01bbe2963be99e5cde65a636a13ce9de229777285e4db9'], - }), - ('beeswarm', '0.2.3', { - 'checksums': ['0115425e210dced05da8e162c8455526a47314f72e441ad2a33dcab3f94ac843'], - }), - ('vipor', '0.4.5', { - 'checksums': ['7d19251ac37639d6a0fed2d30f1af4e578785677df5e53dcdb2a22771a604f84'], - }), - ('ggbeeswarm', '0.6.0', { - 'checksums': ['bbac8552f67ff1945180fbcda83f7f1c47908f27ba4e84921a39c45d6e123333'], - }), - ('shinydashboard', '0.7.1', { - 'checksums': ['51a49945c6b8a684111a2ba4b2a5964e3a50610286ce0378e37ae02316620a4e'], - }), - ('rrcov', '1.5-2', { - 'checksums': ['a7641b93ca8efd91b0957adecd76f96c53d3804ace7b1cbe84872f655199c254'], - }), - ('WriteXLS', '5.0.0', { - 'checksums': ['5aeb631c7f4dee300a19ded493110d7241e1b79744be05beca770a01ffc1d7bf'], - }), - ('bst', '0.3-17', { - 'checksums': ['1ed161d33a7304abfa2fb23daeda2f870ad8483b7fa9b91e6fc8ced21fd8f074'], - }), - ('mpath', '0.3-25', { - 'checksums': ['3332f74255520152cb2149bdff24ad650a036161a7629f686c8fee804c0336e8'], - }), - ('timereg', '1.9.4', { - 'checksums': ['fbf4eeee1648fceb98773156764c32b3a9481f0fb9f8dc3a9d0331a9051cb54b'], - }), - ('peperr', '1.1-7.1', { - 'checksums': ['5d4eff0f0b61c0b3e479c2ac2978c8e32373b9630565bf58fee48ead6166698a'], - }), - ('heatmap3', '1.1.7', { - 'checksums': ['bab39bdcc462ed9e15dda54d58385b7c8d2bca800cd0e6ee2fce12475661b2bd'], - }), - ('GlobalOptions', '0.1.1', { - 'checksums': ['4249ef78424128050af83bbb8e71b4af82f8490c87f6a9d927782b80be830975'], - }), - ('circlize', '0.4.8', { - 'checksums': ['22d6908b9d2e496105d9b70b73a74152398e5e9e38c60042ffe041df2b4c794b'], - }), - ('GetoptLong', '0.1.8', { - 'checksums': ['6c0edb7233b79fb7f4789a825e8e7d7eee50b5e85b7fd5b7d74b9440fd9e1dd1'], - }), - ('dendextend', '1.13.4', { - 'checksums': ['c456b4f43075e8de0f29a6c997e1c0d4788487ab7b947a4b1bf05db2b4f94bde'], - }), - ('RInside', '0.2.16', { - 'checksums': ['7ae4ade128ea05f37068d59e610822ff0b277f9d39d8900f7eb31759ad5a2a0e'], - }), - ('limSolve', '1.5.6', { - 'checksums': 
['b97ea9930383634c8112cdbc42f71c4e93fe0e7bfaa8f401921835cb44cb49a0'], - }), - ('dbplyr', '1.4.3', { - 'checksums': ['69ac7b4022c691e3822fc73fabb3bf073405d5a433c52f5f0f98cf90a1d228ea'], - }), - ('modelr', '0.1.6', { - 'checksums': ['d7e5f3ddf0b3e6520ca06229471f5bcd9e371e2fecd53c03202b474c2a1955f4'], - }), - ('debugme', '1.1.0', { - 'checksums': ['4dae0e2450d6689a6eab560e36f8a7c63853abbab64994028220b8fd4b793ab1'], - }), - ('reprex', '0.3.0', { - 'checksums': ['203c2ae6343f6ff887e7a5a3f5d20bae465f6e8d9745c982479f5385f4effb6c'], - }), - ('selectr', '0.4-2', { - 'checksums': ['5588aed05f3f5ee63c0d29953ef53da5dac7afccfdd04b7b22ef24e1e3b0c127'], - }), - ('rvest', '0.3.5', { - 'checksums': ['0e7f41be4ce6501d7af50575a2532d4bfd9153ca57900ee62dbc27c0a22c0a64'], - }), - ('tidyverse', '1.3.0', { - 'checksums': ['6d8acb81e994f9bef5e4dcf908bcea3786d108adcf982628235b6c8c80f6fe09'], - }), - ('R.cache', '0.14.0', { - 'checksums': ['18af4e372440b9f28b4b71346c8ed9de220232f9903730ccee2bfb3c612c16d9'], - }), - ('R.rsp', '0.43.2', { - 'checksums': ['f291a78ce9955943e0ebad1291f729dc4d9a8091f04b83fc4b1526bcb6c71f89'], - }), - ('listenv', '0.8.0', { - 'checksums': ['fd2aaf3ff2d8d546ce33d1cb38e68401613975117c1f9eb98a7b41facf5c485f'], - }), - ('globals', '0.12.5', { - 'checksums': ['1519a7668b4b549c081f60a5f6b71d8d1dc8833f618125f6c0e4caf8b48a48c1'], - }), - ('future', '1.17.0', { - 'checksums': ['2fa3b88439eaa33901669295186d04eb54f033257015683cf8a2e3c7f83b9e34'], - }), - ('gdistance', '1.3-1', { - 'checksums': ['0e9a7ab4fb75c2990ff7b85aa0661aaadbf4804f2a92fac9dd6d3c75db346813'], - }), - ('vioplot', '0.3.4', { - 'checksums': ['4914262f2e7913ffa5741e74b20157f4a904ba31e648fa5df9ff6a1aaba753bb'], - }), - ('emulator', '1.2-20', { - 'checksums': ['7cabf2cf74d879ad9dbaed8fdee54a5c94a8658a0645c021d160b2ef712ce287'], - }), - ('gmm', '1.6-4', { - 'checksums': ['03ad5ff37d174e9cef13fa41d866412c57b7cbd9155312831e16a1fcda70bc95'], - }), - ('tmvtnorm', '1.4-10', { - 'checksums': ['1a9f35e9b4899672e9c0b263affdc322ecb52ec198b2bb015af9d022faad73f0'], - }), - ('IDPmisc', '1.1.20', { - 'checksums': ['bcb9cd7b8097e5089d1936286ef310ac2030ea7791350df706382ba470afc67f'], - }), - ('gap', '1.2.2', { - 'checksums': ['9c66a52b371b282b20295676bdd86a11d59a6fb2acddb19170376e1a5c65b834'], - }), - ('qrnn', '2.0.5', { - 'checksums': ['3bd83ee8bd83941f9defdab1b5573d0ceca02bf06759a67665e5b9358ff92f52'], - }), - ('TMB', '1.7.16', { - 'checksums': ['84740a2eaecd2ece7049c82d661fe1688008fdece96d90399d31a5d8a0089e52'], - }), - ('glmmTMB', '1.0.1', { - 'checksums': ['b582ac41fb4390146f1446c6629fec40c6c9c125f99083602f091dc60f0ebd69'], - }), - ('gmp', '0.5-13.6', { - 'checksums': ['39a61618cc9eeabd00665cc5f24721e75f0dec8268059a0d18c907c2adf85a48'], - }), - ('ROI', '0.3-3', { - 'checksums': ['2977604b9def46a3638d56a7efa890f2e84fa320bece693d03c196771466a919'], - }), - ('Rglpk', '0.6-4', { - 'checksums': ['a28dbc3130b9618d6ed2ef718d2c55df8ed8c44a47161097c53fe15fa3bfbfa6'], - }), - ('ROI.plugin.glpk', '0.3-0', { - 'checksums': ['160ac14d20c217ff186912c06d53bccf2a33664977ae4c6fc5113a7ac8533ba8'], - }), - ('spaMM', '3.2.0', { - 'checksums': ['40d54ad52c4839f33baa1e488e1e76042e57083ff7780f9c5640c49340ff2999'], - }), - ('qgam', '1.3.2', { - 'checksums': ['273a40d0bfdc340c049bcb85aea83acd887868d8a69c0062b8399e0b24137a52'], - }), - ('DHARMa', '0.3.0', { - 'checksums': ['1c7ac2f1897ca62e0ebb7367c4b31866515c8503d0fa645fa5e8ac5172310298'], - }), - ('mvnfast', '0.2.5', { - 'checksums': ['21b9fa72d1e3843513908aaacd6c4d876cc7a9339782d0151b24910df2975f88'], - }), - 
('bridgesampling', '1.0-0', { - 'checksums': ['9e182e15ba4e0a0fefd6edc58f1939fd971dd5c53c444ca9c1820bb2c1de90ab'], - }), - ('BayesianTools', '0.1.7', { - 'checksums': ['af49389bdeb794da3c39e1d63f59e6219438ecb8613c5ef523b00c6fed5a600c'], - }), - ('gomms', '1.0', { - 'checksums': ['52828c6fe9b78d66bde5474e45ff153efdb153f2bd9f0e52a20a668e842f2dc5'], - }), - ('feather', '0.3.5', { - 'checksums': ['50ff06d5e24d38b5d5d62f84582861bd353b82363e37623f95529b520504adbf'], - }), - ('dummies', '1.5.6', { - 'checksums': ['7551bc2df0830b98c53582cac32145d5ce21f5a61d97e2bb69fd848e3323c805'], - }), - ('SimSeq', '1.4.0', { - 'checksums': ['5ab9d4fe2cb1b7634432ff125a9e04d2f574fed06246a93859f8004e10790f19'], - }), - ('uniqueAtomMat', '0.1-3-2', { - 'checksums': ['f7024e73274e1e76a870ce5e26bd58f76e8f6df0aa9775c631b861d83f4f53d7'], - }), - ('PoissonSeq', '1.1.2', { - 'checksums': ['6f3dc30ad22e33e4fcfa37b3427c093d591c02f1b89a014d85e63203f6031dc2'], - }), - ('aod', '1.3.1', { - 'checksums': ['052d8802500fcfdb3b37a8e3e6f3fbd5c3a54e48c3f68122402d2ea3a15403bc'], - }), - ('cghFLasso', '0.2-1', { - 'checksums': ['6e697959b35a3ceb2baa1542ef81f0335006a5a9c937f0173c6483979cb4302c'], - }), - ('svd', '0.5', { - 'checksums': ['d042d448671355d0664d37fd64dc90932eb780e6494c479d4431d1faae2071a1'], - }), - ('Rssa', '1.0.2', { - 'checksums': ['3991ad98e0170034b06ae8bb5b6337cbc418dc31ce465d02030cedf4ab69ff91'], - }), - ('JBTools', '0.7.2.9', { - 'checksums': ['b33cfa17339df7113176ad1832cbb0533acf5d25c36b95e888f561d586c5d62f'], - }), - ('RUnit', '0.4.32', { - 'checksums': ['23a393059989000734898685d0d5509ece219879713eb09083f7707f167f81f1'], - }), - ('DistributionUtils', '0.6-0', { - 'checksums': ['7443d6cd154760d55b6954142908eae30385672c4f3f838dd49876ec2f297823'], - }), - ('gapfill', '0.9.6', { - 'checksums': ['850d0be9d05e3f3620f0f5143496321f1004ed966299bffd6a67a9abd8d9040d'], - }), - ('gee', '4.13-20', { - 'checksums': ['53014cee059bd87dc22f9679dfbf18fe6813b9ab41dfe90361921159edfbf798'], - }), - ('Matching', '4.9-7', { - 'checksums': ['1956ecb5ebe1c88e2112cd277ae5c2ab4b8d8f60743e6e856a2c2e40aa05fc6d'], - }), - ('MatchIt', '3.0.2', { - 'checksums': ['782b159a2b5172e758e3993177930d604140ae668fd8a7c98c30792df80de9de'], - }), - ('RItools', '0.1-17', { - 'checksums': ['75654780e9ca39cb3c43acfaca74080ad74de50f92c5e36e95694aafdfdc0cea'], - }), - ('optmatch', '0.9-13', { - 'checksums': ['f8f327faa95c808773376570793bbabdbc185a6c7fcdce3b96a09c998134d0d8'], - }), - ('SPAtest', '3.0.2', { - 'checksums': ['7a5e02f636df4c299d3a2d36033f26492b6db51f04a5cd1c2ff17e7ec1a4e831'], - }), - ('SKAT', '2.0.0', { - 'checksums': ['b90be9552f65f0055311ec7a4de5b33520a040f9202aa5872fbfae306c496ce2'], - }), - ('GillespieSSA', '0.6.1', { - 'checksums': ['272e9b6b26001d166fd7ce8d04f32831ba23c676075fbd1e922e27ba2c962052'], - }), - ('startupmsg', '0.9.6', { - 'checksums': ['1d60ff13bb260630f797bde66a377a5d4cd65d78ae81a3936dc4374572ec786e'], - }), - ('distr', '2.8.0', { - 'checksums': ['bb7df05d6b946bcdbbec2e3397c7c7e349b537cabfcbb13a34bcf6312a71ceb7'], - }), - ('distrEx', '2.8.0', { - 'checksums': ['b064cde7d63ce93ec9969c8c4463c1e327758b6f8ea7765217d77f9ba9d590bf'], - }), - ('KODAMA', '1.5', { - 'checksums': ['8ecf53732c1be2bd1e111b3c6de65b66caf28360306e683fe945dc76d4c267dd'], - }), - ('locfdr', '1.1-8', { - 'checksums': ['42d6e12593ae6d541e6813a140b92591dabeb1df94432a515507fc2eee9a54b9'], - }), - ('ica', '1.0-2', { - 'checksums': ['e721596fc6175d3270a60d5e0b5b98be103a8fd0dd93ef16680af21fe0b54179'], - }), - ('dtw', '1.21-3', { - 'checksums': 
['1aa46b285b7a31ba19759e83562671ed9076140abec79fe0df0316af43871e0a'], - }), - ('SDMTools', '1.1-221.2', { - 'checksums': ['f0dd8c5f98d2f2c012536fa56d8f7a58aaf0c11cbe3527e66d4ee3194f6a6cf7'], - }), - ('ggridges', '0.5.2', { - 'checksums': ['b03a775df279a71f259470335decf033b0b9e34b7ee5726681b302ae4e11ff0e'], - }), - ('TFisher', '0.2.0', { - 'checksums': ['bd9b7484d6fba0165841596275b446f85ba446d40e92f3b9cb37381a3827e76f'], - }), - ('lsei', '1.2-0', { - 'checksums': ['4781ebd9ef93880260d5d5f23066580ac06061e95c1048fb25e4e838963380f6'], - }), - ('npsurv', '0.4-0', { - 'checksums': ['404cf7135dc40a04e9b81224a543307057a8278e11109ba1fcaa28e87c6204f3'], - }), - ('fitdistrplus', '1.0-14', { - 'checksums': ['85082590f62aa08d99048ea3414c5cc1e5b780d97b3779d2397c6cb435470083'], - }), - ('rappdirs', '0.3.1', { - 'checksums': ['2fd891ec16d28862f65bb57e4a78f77a597930abb59380e757afd8b6c6d3264a'], - }), - ('reticulate', '1.15', { - 'checksums': ['47db3e9c9424263ade15287da8e74f6ba261a936b644b197dba6772853b7b50d'], - }), - ('hdf5r', '1.3.2', { - 'installopts': '--configure-args="--with-hdf5=$EBROOTHDF5/bin/h5cc"', - 'preinstallopts': "unset LIBS && ", - 'checksums': ['31493d9dde9705543e5474c937fa5b4b64895ae1dd6ee51d7039dd95a6015730'], - }), - ('DTRreg', '1.5', { - 'checksums': ['eb9b4d98b25eec304a447db302f618a75180f8d8fe0f5728ecd7e85957613456'], - }), - ('pulsar', '0.3.6', { - 'checksums': ['b5851bf365003ace07542fd21ccff015c4b21ffd73e21ec3a539563e9ef53564'], - }), - ('bayesm', '3.1-4', { - 'checksums': ['061b216c62bc72eab8d646ad4075f2f78823f9913344a781fa53ea7cf4a48f94'], - }), - ('energy', '1.7-7', { - 'checksums': ['67b88fb33ee6e7bec2e4fe356a4efd36f70c3cf9b0ebe2f6d9da9ec96de9968f'], - }), - ('compositions', '1.40-5', { - 'checksums': ['879e296037b0b3c52cfe48556820500b94d4eea16ec2b40f85988b65c5f72a51'], - }), - ('clustree', '0.4.2', { - 'checksums': ['5d6b8ee3cbbcdd235a7abe4107429e45847ed09ec1cdb572ad6efb9d88dff82e'], - }), - ('plotly', '4.9.2.1', { - 'checksums': ['f45eae325ab7e7924b0be098bad866ce003d657cf63e137104401c2dd4401db8'], - }), - ('tweedie', '2.3.2', { - 'checksums': ['9a6226e64e3d56eb7eb2a408f8b825c2ad6ee0ea203a9220e85e7789514adb81'], - }), - ('RcppGSL', '0.3.7', { - 'checksums': ['45e95c4170fc8421ae9b32134b3a402f76ea9657030969723a3563c7ce14dc32'], - }), - ('mvabund', '4.1.3', { - 'checksums': ['4b98049026fcc5a262163f6801d5b98b8543267cf7b0edac8382d5311b81a8fc'], - }), - ('fishMod', '0.29', { - 'checksums': ['5989e49ca6d6b2c5d514655e61f75b019528a8c975f0d6056143f17dc4277a5d'], - }), - ('gllvm', '1.2.1', { - 'checksums': ['a9dca68227a8f89c61950f6411de3b988e6e067d97fadc589f69ddd731c2e1ff'], - }), - ('grpreg', '3.2.2', { - 'checksums': ['e59f576ee5d794444917e0fbdab0d1ebf4aa71967c9a35ec196899ed5b168388'], - }), - ('trust', '0.1-8', { - 'checksums': ['952e348b62aec35988b103fd152329662cb6a451538f184549252fbf49d7dcac'], - }), - ('ergm', '3.10.4', { - 'checksums': ['885f0b1a23c5a2c1947962350cfab66683dfdfd1db173c115e90396d00831f22'], - }), - ('networkDynamic', '0.10.1', { - 'checksums': ['22eed8d9dea8d70877c1619eb2bc3f1ac5142ce3db6fd6eb3e0879ca56b76ca0'], - }), - ('tergm', '3.6.1', { - 'checksums': ['21de2eca943d89ba63af14951655d626f241bafccc4b2709fa39aa130625cd0f'], - }), - ('ergm.count', '3.4.0', { - 'checksums': ['7c24c79d0901c18991cce907306a1531cca676ae277c6b0a0e4962ad27c36baf'], - }), - ('tsna', '0.3.1', { - 'checksums': ['bba4b5e04ba647784581a2137f653f60b4c83cfd726c399556054c5a6d2cbd95'], - }), - ('statnet', '2019.6', { - 'checksums': ['0903e1a81ed1b6289359cefd12da1424c92456d19e062c3f74197b69e536b29d'], 
- }), - ('aggregation', '1.0.1', { - 'checksums': ['86f88a02479ddc8506bafb154117ebc3b1a4a44fa308e0193c8c315109302f49'], - }), - ('ComICS', '1.0.4', { - 'checksums': ['0af7901215876f95f309d7da6e633c38e4d7faf04112dd6fd343bc15fc593a2f'], - }), - ('dtangle', '2.0.9', { - 'checksums': ['c375068c1877c2e8cdc5601cfd5a9c821645c3dff90ddef64817f788f372e179'], - }), - ('mcmc', '0.9-7', { - 'checksums': ['b7c4d3d5f9364c67a4a3cd49296a61c315ad9bd49324a22deccbacb314aa8260'], - }), - ('MCMCpack', '1.4-6', { - 'checksums': ['6bcd018d6fa589a6854ee1bcea18b9d6c4095f3deae9058f69afbb09cba873c7'], - }), - ('shinythemes', '1.1.2', { - 'checksums': ['2e13d4d5317fc61082e8f3128b15e0b10ed9736ce81e152dd7ae7f6109f9b18a'], - }), - ('csSAM', '1.2.4', { - 'checksums': ['3d6442ad8c41fa84633cbbc275cd67e88490a160927a5c55d29da55a36e148d7'], - }), - ('bridgedist', '0.1.0', { - 'checksums': ['dc7c1c8874d6cfa34d550d9af194389e13471dfbc55049a1ab66db112fbf1343'], - }), - ('asnipe', '1.1.12', { - 'checksums': ['3a1f166f1c71b5877a2acca1384ec6c9b430b67af67ef26125f2abbb53c66206'], - }), - ('oddsratio', '2.0.0', { - 'checksums': ['89bf3c68a6ded6a98f4ee8d487c29605ad00ac5f8db9b8bf1a52144e65332553'], - }), - ('mltools', '0.3.5', { - 'checksums': ['7093ffceccdf5d4c3f045d8c8143deaa8ab79935cc6d5463973ffc7d3812bb10'], - }), - ('h2o', '3.30.0.1', { - 'checksums': ['cb11ef58a7d7dfd3a9193686ddd9c8a9f988b33a69656d8b3e8f59082068b0f5'], - }), - ('mlegp', '3.1.7', { - 'checksums': ['d4845eaf9260f8b8112726dd7ceb5c2f5ce75125fa313191db9de121f2ee15e0'], - }), - ('itertools', '0.1-3', { - 'checksums': ['b69b0781318e175532ad2d4f2840553bade9637e04de215b581704b5635c45d3'], - }), - ('missForest', '1.4', { - 'checksums': ['f785804b03bdf424e1c76095989a803afb3b47d6bebca9a6832074b6326c0278'], - }), - ('bartMachineJARs', '1.1', { - 'checksums': ['f2c31cb94d7485174a2519771127a102e35b9fe7f665e27beda3e76a56feeef2'], - }), - ('bartMachine', '1.2.4.2', { - 'checksums': ['28a5f7363325021bd93f9bd060cc48f20c689dae2f2f6f7100faae66d7651f80'], - }), - ('lqa', '1.0-3', { - 'checksums': ['3889675dc4c8cbafeefe118f4f20c3bd3789d4875bb725933571f9991a133990'], - }), - ('PresenceAbsence', '1.1.9', { - 'checksums': ['1a30b0a4317ea227d674ac873ab94f87f8326490304e5b08ad58953cdf23169f'], - }), - ('GUTS', '1.1.1', { - 'checksums': ['094b8f51719cc36ddc56e3412dbb146eafc93c5e8fbb2c5999c2e80ea7a7d216'], - }), - ('GenSA', '1.1.7', { - 'checksums': ['9d99d3d0a4b7770c3c3a6de44206811272d78ab94481713a8c369f7d6ae7b80f'], - }), - ('parsedate', '1.2.0', { - 'checksums': ['39ab3c507cb3efcd677c6cf453f46d6b1948662bd70c7765845e755ea1e1633d'], - }), - ('circular', '0.4-93', { - 'checksums': ['76cee2393757390ad91d3db3e5aeb2c2d34c0a46822b7941498571a473417142'], - }), - ('cobs', '1.3-4', { - 'checksums': ['a1c7b77e4ca097349884fd1c0d863d74f9092766131094d603f34d33ab2e3c42'], - }), - ('resample', '0.4', { - 'checksums': ['f0d5f735e1b812612720845d79167a19f713a438fd10a6a3206e667045fd93e5'], - }), - ('MIIVsem', '0.5.4', { - 'checksums': ['de918d6b1820c59a7d4324342ad15444c2370ce1d843397a136c307397ed64b9'], - }), - ('medflex', '0.6-6', { - 'checksums': ['b9d04fb5281d0ea0555ec4f327a0ee951a7f312a3af944578dc175183dc49211'], - }), - ('Rserve', '1.7-3.1', { - 'checksums': ['3ba1e919706e16a8632def5f45d666b6e44eafa6c14b57064d6ddf3415038f99'], - }), - ('spls', '2.2-3', { - 'checksums': ['bbd693da80487eef2939c37aba199f6d811ec289828c763d9416a05fa202ab2e'], - }), - ('Boruta', '6.0.0', { - 'checksums': ['1c9a7aabe09f040e147f6c614f5fe1d0b951d3b0f0024161fbb4c31da8fae8de'], - }), - ('dr', '3.0.10', { - 'checksums': 
['ce523c1bdb62a9dda30afc12b1dd96975cc34695c61913012236f3b80e24bf36'], - }), - ('CovSel', '1.2.1', { - 'checksums': ['b375d00cc567e125ff106b4357654f43bba3abcadeed2238b6dea4b7a68fda09'], - }), - ('tmle', '1.4.0.1', { - 'checksums': ['075e7b7fe0496e02785eb35aed0db84476db756c6f14a0047808af2565b33501'], - }), - ('ctmle', '0.1.2', { - 'checksums': ['e3fa0722cd87aa0e0b209c2dddf3fc44c6d09993f1e66a6c43285fe950948161'], - }), - ('BayesPen', '1.0', { - 'checksums': ['772df9ae12cd8a3da1d5b7d1f1629602c7693f0eb03945784df2809e2bb061b0'], - }), - ('inline', '0.3.15', { - 'checksums': ['ff043fe13c1991a3b285bed256ff4a9c0ba10bee764225a34b285875b7d69c68'], - }), - ('BMA', '3.18.12', { - 'checksums': ['cbabb77d92b09a11a986ad03950322e78dff743f82ff67fda51d32e44135dd79'], - }), - ('BCEE', '1.3.0', { - 'checksums': ['82afc9b8c6d617f5f728341960ae32922194f637c550916b3bea12c231414fa7'], - }), - ('bacr', '1.0.1', { - 'checksums': ['c847272e2c03fd08ed79b3b739f57fe881af77404b6fd087caa0c398c90ef993'], - }), - ('clue', '0.3-57', { - 'checksums': ['6e369d07b464a9624209a06b5078bf988f01f7963076e946649d76aea0622d17'], - }), - ('bdsmatrix', '1.3-4', { - 'checksums': ['251e21f433a016ec85e478811ea3ad34c572eb26137447f48d1bbf3cc8bb06ea'], - }), - ('fftwtools', '0.9-8', { - 'checksums': ['4641c8cd70938c2a8bde0b6da6cf7f83e96175ef52f1ca42ec3920a1dabf1bdb'], - }), - ('imagerExtra', '1.3.2', { - 'checksums': ['0ebfa1eabb89459d774630ab73c7a97a93b9481ea5afc55482975475acebd5b8'], - }), - ('MALDIquant', '1.19.3', { - 'checksums': ['a730327c1f8d053d29e558636736b7b66d0671a009e0004720b869d2c76ff32c'], - }), - ('threejs', '0.3.3', { - 'checksums': ['76c759c8b20fb34f4f7a01cbd1b961296e1f19f4df6dded69aae7f1bca80219c'], - }), - ('LaplacesDemon', '16.1.4', { - 'checksums': ['4152a1c3c652979e97870e5c50c45a243d0ad8d4ff968091160e3d66509f61db'], - }), - ('rda', '1.0.2-2.1', { - 'checksums': ['eea3a51a2e132a023146bfbc0c384f5373eb3ea2b61743d7658be86a5b04949e'], - }), - ('sampling', '2.8', { - 'checksums': ['356923f35971bb55f7e97b178aede3366374aa3ad3d24a97be765660553bf21a'], - }), - ('lda', '1.4.2', { - 'checksums': ['5606a1e1bc24706988853528023f7a004c725791ae1a7309f1aea2fc6681240f'], - }), - ('jiebaRD', '0.1', { - 'checksums': ['045ee670f5378fe325a45b40fd55136b355cbb225e088cb229f512c51abb4df1'], - }), - ('jiebaR', '0.11', { - 'checksums': ['adde8b0b21c01ec344735d49cd33929511086719c99f8e10dce4ca9479276623'], - }), - ('hdm', '0.3.1', { - 'checksums': ['ba087565e9e0a8ea30a6095919141895fd76b7f3c05a03e60e9e24e602732bce'], - }), - ('abe', '3.0.1', { - 'checksums': ['66d2e9ac78ba64b7d27b22b647fc00378ea832f868e51c18df50d6fffb8029b8'], - }), - ('SignifReg', '3.0', { - 'checksums': ['ada4e1f8cbb08ba8ff16275ec5f9a453857e0cab63b70d42753989ab4c716b7b'], - }), - ('bbmle', '1.0.23.1', { - 'checksums': ['60421eb01190b741ab14885eaf1088f51d49dcf70e58c42b360489bca04e745c'], - }), - ('emdbook', '1.3.12', { - 'checksums': ['0646caf9e15aaa61ff917a4b5fdf82c06ac17ef221a61dec3fbb554e7bff4353'], - }), - ('SOAR', '0.99-11', { - 'checksums': ['d5a0fba3664087308ce5295a1d57d10bad149eb9771b4fe67478deae4b7f68d8'], - }), - ('rasterVis', '0.47', { - 'checksums': ['123ebe870895c2ba3a4b64d8a18bccab5287c831fa14bb0fe07f0d7de61e51d3'], - }), - ('tictoc', '1.0', { - 'checksums': ['47da097c1822caa2d8e262381987cfa556ad901131eb96109752742526b2e2fe'], - }), - ('ISOcodes', '2020.03.16', { - 'checksums': ['160eb4ea23be53305e4e728002c8f5d3852d89155f538deccb734e7c8ad4e1c3'], - }), - ('stopwords', '2.0', { - 'checksums': ['5cca60ce9f44406486e0dca2e36cec2488096c3558b45fc3bd0e7b6d1500af94'], - }), - 
('janeaustenr', '0.1.5', { - 'checksums': ['992f6673653daf7010fe176993a01cd4127d9a88be428da8da7a28241826d6f3'], - }), - ('SnowballC', '0.7.0', { - 'checksums': ['b10fee9d322f567a22c580b49b5d4ba1c86eae40a71794ca92552c726b3895f3'], - }), - ('tokenizers', '0.2.1', { - 'checksums': ['28617cdc5ddef5276abfe14a2642999833322b6c34697de1d4e9d6dc7670dd00'], - }), - ('hunspell', '3.0', { - 'checksums': ['01fb9c87f7cf094aaad3b7098378134f2e503286224351e91d08c00b6ee19857'], - }), - ('topicmodels', '0.2-11', { - 'checksums': ['9c26b4d967be6ec26834a39f04aa92b059ea9503eb70c700e1c0a7a43637b74a'], - }), - ('tidytext', '0.2.4', { - 'checksums': ['46ff59063b6a519c9eb606ae135ef31d7073ac729e4a912c9f77e234801b933d'], - }), - ('splitstackshape', '1.4.8', { - 'checksums': ['656032c3f1e3dd5b8a3ee19ffcae617e07104c0e342fc3da4d863637a770fe56'], - }), - ('grImport2', '0.2-0', { - 'checksums': ['a102a2d877e42cd4e4e346e5510a77b2f3e57b43ae3c6d5c272fdceb506b00a7'], - }), - ('preseqR', '4.0.0', { - 'checksums': ['0143db473fb9a811f9cf582a348226a5763e62d9857ce3ef4ec41412abb559bc'], - }), - ('idr', '1.2', { - 'checksums': ['8bbfdf82c8c2b5c73eb079127e198b6cb65c437bb36729f502c7bcd6037fdb16'], - }), - ('entropy', '1.2.1', { - 'checksums': ['edb27144b8f855f1ef21de6b93b6b6c5cf7d4f2c3d592bf625e5158c02226f83'], - }), - ('kedd', '1.0.3', { - 'checksums': ['38760abd8c8e8f69ad85ca7992803060acc44ce68358de1763bd2415fdf83c9f'], - }), - ('HiddenMarkov', '1.8-11', { - 'checksums': ['4a1614249eee9f428bc182ea9ced443dff4eafa7babf4259c720e5b4da2d08fa'], - }), - ('lmerTest', '3.1-2', { - 'checksums': ['385870873fd303c2caa4ac43e2df0ca5aa36ddb484bfb4eefbc5c4ac4bef6de2'], - }), - ('loo', '2.2.0', { - 'checksums': ['466df60953a89fcb135b32909197c3ff26ecea719c191667faa5747324fb01c3'], - }), - ('StanHeaders', '2.21.0-1', { - 'checksums': ['4e94148af2960f203b208c2b725d8be628ca282c7a1e967a7e6e78aa5eb90d3f'], - }), - ('rstan', '2.19.3', { - 'checksums': ['d7025dccdc2337fd0cf3b9689c3e0a07558a47a08b7cbc370e0b3998256f1689'], - }), - ('Rborist', '0.2-3', { - 'checksums': ['f3b3f953ca99e0d17425ac6ba9a7b1e9d6098343abace575cdb492bca2a9c461'], - }), - ('VSURF', '1.1.0', { - 'checksums': ['eee99e0c441795c2ccb21cc6e0a37b24f580241e494c83e811b726b43469eeab'], - }), - ('mRMRe', '2.1.0', { - 'checksums': ['fe23c5c1e7b5b653e0358e98f25ebd8c0c74c6c871606d1b24cd02a5534181d4'], - }), - ('dHSIC', '2.1', { - 'checksums': ['94c86473790cf69f11c68ed8ba9d6ae98218c7c69b7a9a093f235d175cf83db0'], - }), - ('ggsci', '2.9', { - 'checksums': ['4af14e6f3657134c115d5ac5e65a2ed74596f9a8437c03255447cd959fe9e33c'], - }), - ('ggsignif', '0.6.0', { - 'checksums': ['6fe13efda31386483e64d466ba2f5a53a2a235ae04f5c17bba3ccc63d283499e'], - }), - ('corrplot', '0.84', { - 'checksums': ['0dce5e628ead9045580a191f60c58fd7c75b4bbfaaa3307678fc9ed550c303cc'], - }), - ('rstatix', '0.5.0', { - 'checksums': ['06b2ba1b16698c93203474eb67e59169fa4127283d46cf694e032a5472956b46'], - }), - ('ggfan', '0.1.3', { - 'checksums': ['5c888b203ecf5e3dc7a317a790ca059c733002fbca4b4bc1a4f62b7ded5f70dc'], - }), - ('ggpubr', '0.3.0', { - 'checksums': ['b82ffc6bd5974c3036d08393fa1f2bafeaf6f567e0b3faf43e38226b19399eb6'], - }), - ('yaImpute', '1.0-32', { - 'checksums': ['08eee5d851b80aad9c7c80f9531aadd50d60e4b16b3a80657a50212269cd73ff'], - }), - ('intrinsicDimension', '1.2.0', { - 'checksums': ['6cc9180a83aa0d123f1e420136bb959c0d5877867fa170b79536f5ee22106a32'], - }), - ('patchwork', '1.0.0', { - 'checksums': ['8bfb59b91775781848f39eedcaaaf92c147e2637f384085fcdd41fc8355b3c63'], - }), - ('leiden', '0.3.3', { - 'checksums': 
['c2b5e1c061c8bbea494639a0d7f0fa22f5b41ff5fd911409e5f832f3575d06c2'], - }), - ('future.apply', '1.4.0', { - 'checksums': ['737e5a8e2d0ce2678835f5be15c96d491f690d307662ed6719be41937633f8cd'], - }), - ('sctransform', '0.2.1', { - 'checksums': ['d6430a81a66c93da770b1a7c55344df42187321038b4eee80b7066cdd8a7631f'], - }), - ('packrat', '0.5.0', { - 'checksums': ['d6a09290fbe037a6c740921c5dcd70b500e5b36e4713eae4010adf0c456bc5f7'], - }), - ('shinyjs', '1.1', { - 'checksums': ['8986181baa68fb2863eea65b9df1b04b9b4e1293685298531d42de3bc2f06892'], - }), - ('colourpicker', '1.0', { - 'checksums': ['f1dacbafb05c09f61b9bdd0fdcee5344409759b042a71ec46d7c9e3710107b7c'], - }), - ('ggExtra', '0.9', { - 'checksums': ['f22db92d6e3e610901998348acbcaa6652fa6c62a285a622d3b962ba9e89aba2'], - }), - ('findpython', '1.0.5', { - 'checksums': ['3e9a21988cb78833769b02680d128a0cc01bcb41aa9c9725ab1742f349759145'], - }), - ('argparse', '2.0.1', { - 'checksums': ['949843920d14fc7c162aedab331a936499541736e7dafbb103fbfd79be8147ab'], - }), - ('intergraph', '2.0-2', { - 'checksums': ['6cbe77f1e87fa1c110db2d46010f2f3ae72bfdb708ce2ca84c1cdc2cd6eb47a1'], - }), - ('ggnetwork', '0.5.8', { - 'checksums': ['a8c7c19a2bafce898c95d0b2401ef052925db57b85058c7203f0122b3af7bbbd'], - }), - ('qqman', '0.1.4', { - 'checksums': ['3ad01f82132bf75960ae0d8a81cae84eaf4a9ab262f183fc3d6439189e4a3aed'], - }), - ('rstantools', '2.0.0', { - 'checksums': ['d200a4d8c62d8577fdba819bf770e7abb11c57c3332f4498e1d30ce824598b3a'], - }), - ('bayesplot', '1.7.1', { - 'checksums': ['820ca9ca3258fc68333e75fd60898c0d0f08f513b66c161ca6159a54ad54006b'], - }), - ('dygraphs', '1.1.1.6', { - 'checksums': ['c3d331f30012e721a048e04639f60ea738cd7e54e4f930ac9849b95f0f005208'], - }), - ('rsconnect', '0.8.16', { - 'checksums': ['3f728c6a5153dca28f69b9355ae9d18c5f7e70d12495c0c047eef673c1053116'], - }), - ('shinystan', '2.5.0', { - 'checksums': ['45f9c552a31035c5de8658bb9e5d72da7ec1f88fbddb520d15fe701c677154a1'], - }), - ('brms', '2.12.0', { - 'checksums': ['fa21505dca65d027f1cf1c573258de5f3c51ca8b94abd6dcf9123a3a27a72999'], - }), - ('drgee', '1.1.10', { - 'checksums': ['e684f07f7dfec922380d4202922c11094f859721f77b31ff38b0d35d0f42c743'], - }), - ('stdReg', '3.4.0', { - 'checksums': ['b423df43e9c4bb8ffafe2de88b93fdcf3a90f964f136580ea1b849a83dba7400'], - }), - ('mcmcse', '1.4-1', { - 'checksums': ['6b181f56d60ddf55c9c08a2468ef9ffe3ec8a1b16cfa9a7742c3872597f85d17'], - }), - ('copCAR', '2.0-3', { - 'checksums': ['e626380d3f11ca6c756381f2423ef3661efb52667147114253416cc6151a71b8'], - }), - ('batchmeans', '1.0-4', { - 'checksums': ['8694573009d9070a76007281407d3314da78902e122a9d8aec1f819d3bbe562c'], - }), - ('ngspatial', '1.2-2', { - 'checksums': ['3fa79e45d3a502a58c1454593ec83dfc73144e92b34c14f617a6126557dd0d26'], - }), - ('BIGL', '1.4.3', { - 'checksums': ['7ef7edd5c1852409f1da176e810a9a2a7f0b9441e0e6459d4d16179f3eb000f4'], - }), - ('drugCombo', '1.1.1', { - 'checksums': ['9fdc3a7cf63552c32f1c7573258fc4ceacdaf5c475fe79aa4ca8c9226b9f8a38'], - }), - ('betareg', '3.1-3', { - 'checksums': ['cc19387ec516492d11cf59cdfa07e1733950a2af8196c1e155bc95939bc76246'], - }), - ('unmarked', '1.0.1', { - 'checksums': ['a3bb9bdc7a4a79ea38482df3f8cbb6e9082332a0d894eeb4b3dc816344cec0e4'], - }), - ('maxlike', '0.1-8', { - 'checksums': ['90aaab9602f259cbfae61fe96e105cc4a0c2a385b42380f85c14f5d544107251'], - }), - ('coxme', '2.2-16', { - 'checksums': ['a0ce4b5649c4c1abbfe2c2bf23089744d1f66eb8368dea16e74e090f366a5111'], - }), - ('AICcmodavg', '2.3-0', { - 'checksums': 
['4d6bcff3c549be9dcefdd849b239659618fdaf9ba0d27b9d0589620d104e5e24'], - }), - ('pacman', '0.5.1', { - 'checksums': ['9ec9a72a15eda5b8f727adc877a07c4b36f8372fe7ed80a1bc6c2068dab3ef7c'], - }), - ('spaa', '0.2.2', { - 'checksums': ['a5a54454d4a7af473ce797875f849bd893005cb04325bf3e0dbddb19fe8d7198'], - }), - ('maxnet', '0.1.2', { - 'checksums': ['dfa02ca1031b369415d8b16863ca5fd115c7bf96b1f8fc24f91719b017f5cce5'], - }), - ('ENMeval', '0.3.0', { - 'checksums': ['1c924098a27c82a9bf75408173b47429c40cc41cae3aba18c86ba217bb45ba60'], - }), - ('plotmo', '3.5.7', { - 'checksums': ['aa366a49a63cabfd5d799a1524e23a4faed022f10ee60f8407d70ab70731b38c'], - }), - ('earth', '5.1.2', { - 'checksums': ['326f98e8c29365ca3cd5584cf2bd6529358f5ef81664cbd494162f92b6c3488d'], - }), - ('mda', '0.5-2', { - 'checksums': ['344f2053215ddf535d1554b4539e9b09067dac878887cc3eb995cef421fc00c3'], - }), - ('biomod2', '3.4.6', { - 'checksums': ['41fd7745f4d0af3f799e9cf4fa5484a427de6854db84c6476fde7a7414787d5b'], - }), - ('ncdf4', '1.17', { - 'checksums': ['db95c4729d3187d1a56dfd019958216f442be6221bd15e23cd597e6129219af6'], - }), - ('mapdata', '2.3.0', { - 'checksums': ['1edc180990387b6b1cd4e43a9505ebeb98e6115e4205c4f32f05b397c781dd76'], - }), - ('wavelets', '0.3-0.2', { - 'checksums': ['22d1d6bff71574a37b047ee5c31d17d52e321d2dd54db1a221f2d7267536e01c'], - }), - ('biwavelet', '0.20.19', { - 'checksums': ['2b9f99e4f56cee87ee2db53abcece205ff592887a9c9cf298c875a1ea594b25d'], - }), - ('wavethresh', '4.6.8', { - 'checksums': ['93b369c6eabcc0187b860b31d84a9d7c72c4a2ed5b23c1520c93bd7bea22e7db'], - }), - ('splancs', '2.01-40', { - 'checksums': ['79744381ebc4a361740a36dca3c9fca9ae015cfe0bd585b7856a664a3da74363'], - }), - ('RandomFieldsUtils', '0.5.3', { - 'checksums': ['ea823cba2e254a9f534efb4b772c0aeef2039ee9ef99744e077b969a87f8031d'], - }), - ('RandomFields', '3.3.8', { - 'checksums': ['8a08e2fdae428e354a29fb6818ae781cc56235a6849a0d29574dc756f73199d0'], - }), - ('geoR', '1.8-1', { - 'checksums': ['990647804590b925a50f72897b24bbabd331cebef0be1696a60528b2f79d6fd3'], - }), - ('intervals', '0.15.2', { - 'checksums': ['0bd23b0ce817ddd851238233d8a5420bf3a6d29e75fd361418cbc50118777c57'], - }), - ('spacetime', '1.2-3', { - 'checksums': ['ca7c0b962d5da0741f6dd85b271d693598756e0eeeb364ada828dbb6d1b9b25b'], - }), - ('gstat', '2.0-6', { - 'checksums': ['6711e68aa2444cf2927879a03a976d8caeca5eac98d806b19a6a7178b90bfcab'], - }), - ('rgeos', '0.5-3', { - 'checksums': ['357454e110ae19a665d5af5ffd7d670d2d7471566dd638dc614365c29b68600b'], - }), - ('repr', '1.1.0', { - 'checksums': ['743fe018f9e3e54067a970bc38b6b8c0c0498b43f88d179ac4a959c2013a5f96'], - }), - ('IRdisplay', '0.7.0', { - 'checksums': ['91eac9acdb92ed0fdc58e5da284aa4bb957ada5eef504fd89bec136747999089'], - }), - ('pbdZMQ', '0.3-3.1', { - 'checksums': ['9e034745cd9c1bdf510a2ec8e7060501abc92ec86020c430ed7e36f5d5cf1523'], - }), - ('IRkernel', '1.1.1', { - 'checksums': ['f5a129168f44bdda6da8cc907189a2737f692d427529515d87312a17dbd806f8'], - }), - # language server support - ('collections', '0.3.5', { - 'checksums': ['bf76ab5c6a8082b6bb70b9bf3bdb30658e823e3b7b28cf7be7e8a87d117a7114'], - }), - ('xmlparsedata', '1.0.4', { - 'checksums': ['387b13c25bea9ddc0a39b817c17c199b86ab9acafa328daae2233a9ca577fb9c'], - }), - ('cyclocomp', '1.1.0', { - 'checksums': ['cdbf65f87bccac53c1527a2f1269ec7840820c18503a7bb854910b30b71e7e3e'], - }), - ('lintr', '2.0.1', { - 'checksums': ['fe0723757b653ef83ec7a5005d0a7524cd917d646d35a5627ee639158881ce93'], - }), - ('styler', '1.3.2', { - 'checksums': 
['3fcf574382c607c2147479bad4f9fa8b823f54fb1462d19ec4a330e135a44ff1'],
-    }),
-    ('mockery', '0.4.2', {
-        'checksums': ['988e249c366ee7faf277de004084cf5ca24b5c8a8c6e3842f1b1362ce2f7ea9b'],
-    }),
-    ('languageserver', '0.3.8', {
-        'checksums': ['371db6976d6066d654c9d31f911dba667c1f8ceb4ab67da34d44037b66f3ca9b'],
-    }),
-]
-
-moduleclass = 'lang'
diff --git a/Golden_Repo/r/R/R-4.0.2-gpsmkl-2020.eb b/Golden_Repo/r/R/R-4.0.2-gpsmkl-2020.eb
deleted file mode 100644
index fb9b2a4ee5787a8c8ed54519573aaa64ad78aa1a..0000000000000000000000000000000000000000
--- a/Golden_Repo/r/R/R-4.0.2-gpsmkl-2020.eb
+++ /dev/null
@@ -1,2571 +0,0 @@
-name = 'R'
-version = '4.0.2'
-
-homepage = 'https://www.r-project.org/'
-description = """R is a free software environment for statistical computing
- and graphics."""
-
-site_contacts = 'a.strube@fz-juelich.de'
-
-toolchain = {'name': 'gpsmkl', 'version': '2020'}
-
-source_urls = ['https://cloud.r-project.org/src/base/R-%(version_major)s']
-sources = [SOURCE_TAR_GZ]
-checksums = ['d3bceab364da0876625e4097808b42512395fdf41292f4915ab1fd257c1bbe75']
-
-builddependencies = [
-    ('pkg-config', '0.29.2'),
-]
-dependencies = [
-    ('X11', '20200222'),
-    ('OpenGL', '2020'),
-    ('cairo', '1.17.2'),
-    ('libreadline', '8.0'),
-    ('ncurses', '6.2'),
-    ('bzip2', '1.0.8'),
-    ('XZ', '5.2.5'),
-    ('zlib', '1.2.11'),
-    ('SQLite', '3.32.3'),
-    ('PCRE2', '10.34'),
-    ('libpng', '1.6.37'),  # for plotting in R
-    ('libjpeg-turbo', '2.0.5'),  # for plotting in R
-    ('LibTIFF', '4.1.0'),
-    ('Java', '1.8', '', SYSTEM),
-    ('Tk', '8.6.10'),  # for tcltk
-    ('cURL', '7.71.1'),  # for RCurl
-    ('libxml2', '2.9.10'),  # for XML
-    ('GDAL', '3.1.2', '-Python-3.8.5'),  # for rgdal
-    ('PROJ', '7.1.0'),  # for rgdal
-    ('GMP', '6.2.0'),  # for igraph
-    ('NLopt', '2.6.2'),  # for nloptr
-    ('FFTW', '3.3.8'),  # for fftw
-    ('libsndfile', '1.0.28'),  # for seewave
-    ('ICU', '67.1'),  # for rJava & gdsfmt
-    ('HDF5', '1.10.6'),  # for hdf5r
-    ('UDUNITS', '2.2.26'),  # for units
-    ('GSL', '2.6'),  # for RcppGSL
-    ('ImageMagick', '7.0.10-25'),  # for animation
-    ('GLPK', '4.65'),  # for Rglpk
-    ('netCDF', '4.7.4'),  # needed by ncdf4
-    ('GEOS', '3.8.1', '-Python-3.8.5'),  # for rgeos
-    ('ZeroMQ', '4.3.3'),  # for pbdZMQ, needed by IRkernel
-
-    # An OS dependency should be preferred if the OS version is more recent than
-    # this version; it's nice to have an up-to-date OpenSSL for security
-    # reasons
-    # ('OpenSSL', '1.1.1e'),
-]
-
-osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')]
-
-preconfigopts = 'export LDFLAGS="$LDFLAGS -lm" && '
-
-configopts = "--with-pic --enable-threads --enable-R-shlib"
-# some recommended packages may fail in a parallel build (e.g. Matrix), and
-# we're installing them anyway below
-configopts += " --with-recommended-packages=no"
-
-# specify that at least EasyBuild v4.2.2 is required,
-# since we rely on the updated easyblock for R to configure correctly w.r.t.
-# BLAS/LAPACK
-easybuild_version = '4.2.2'
-
-exts_default_options = {
-    'source_urls': [
-        'https://cran.r-project.org/src/contrib/Archive/%(name)s',  # package archive
-        'https://cran.r-project.org/src/contrib/',  # current version of packages
-        'https://cran.freestatistics.org/src/contrib',  # mirror alternative for current packages
-    ],
-    'source_tmpl': '%(name)s_%(version)s.tar.gz',
-}
-
-# !! order of packages is important !!
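The exts_default_options block above determines how every entry in the exts_list that follows is fetched and verified: source_tmpl is expanded with each extension's name and version, the source_urls are tried in order, and the per-extension checksums value (a 64-character hex string, which EasyBuild treats as SHA-256) is compared against the downloaded tarball. The stand-alone Python sketch below illustrates that resolution for one real entry from the list (abind 1.4-5, with the checksum copied from its entry); it uses only the standard library and is an illustration, not part of this easyconfig or of EasyBuild itself.

import hashlib
import urllib.request

# name, version and expected checksum are copied from the ('abind', '1.4-5', ...) entry below
name, version = 'abind', '1.4-5'
expected_sha256 = '3a3ace5afbcb86e56889efcebf3bf5c3bb042a282ba7cc4412d450bb246a3f2c'

# 'source_tmpl': '%(name)s_%(version)s.tar.gz' expands to the CRAN tarball name
tarball = '%(name)s_%(version)s.tar.gz' % {'name': name, 'version': version}

# the 'source_urls' listed above are tried in order until the tarball can be downloaded
source_urls = [
    'https://cran.r-project.org/src/contrib/Archive/%(name)s' % {'name': name},
    'https://cran.r-project.org/src/contrib/',
    'https://cran.freestatistics.org/src/contrib',
]
for base in source_urls:
    url = '%s/%s' % (base.rstrip('/'), tarball)
    try:
        data = urllib.request.urlopen(url).read()
        break
    except OSError:
        continue
else:
    raise RuntimeError('tarball %s not found on any source URL' % tarball)

# the 'checksums' entry is the SHA-256 digest of that tarball
assert hashlib.sha256(data).hexdigest() == expected_sha256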
-# packages updated on 23 July 2020 -exts_list = [ - 'base', - 'datasets', - 'graphics', - 'grDevices', - 'grid', - 'methods', - 'splines', - 'stats', - 'stats4', - 'tools', - 'utils', - ('Rmpi', '0.6-6', { - 'patches': ['Rmpi_0_6_9_psmpi_2020.patch'], - 'checksums': [ - 'd8fc09ad38264697caa86079885a7a1098921a3116d5a77a62022b9508f8a63a', # Rmpi_0.6-6.tar.gz - '7d5ac8a4531aeb9f4ed44f47e720392ad67bcfa37815a3ad5de4233353e19e05', # Rmpi_0_6_9_psmpi_2020.patch - ], - }), - ('abind', '1.4-5', { - 'checksums': ['3a3ace5afbcb86e56889efcebf3bf5c3bb042a282ba7cc4412d450bb246a3f2c'], - }), - ('magic', '1.5-9', { - 'checksums': ['fa1d5ef2d39e880f262d31b77006a2a7e76ea38e306aae4356e682b90d6cd56a'], - }), - ('Rcpp', '1.0.4.6', { - 'checksums': ['45af675ddbbe155e671453b2e84fe32250bb98d4ccb4342b61c1e25cff10b302'], - }), - ('RcppProgress', '0.4.2', { - 'checksums': ['b1624b21b7aeb1dafb30f092b2a4bef4c3504efd2d6b00b2cdf55dc9df194b48'], - }), - ('lpSolve', '5.6.15', { - 'checksums': ['4627be4178abad34fc85a7d264c2eb5e27506f007e46687b0b8a4f8fbdf4f3ba'], - }), - ('linprog', '0.9-2', { - 'checksums': ['8937b2e30692e38de1713f1513b78f505f73da6f5b4a576d151ad60bac2221ce'], - }), - ('geometry', '0.4.5', { - 'checksums': ['8fedd17c64468721d398e3c17a39706321ab71098b29f5e8d8039dd115a220d8'], - }), - ('bit', '1.1-15.2', { - 'checksums': ['0b83e78385293d6cdc0189a07fcc3f9f9db286c8c4af3288467f5257e79cb28b'], - }), - ('filehash', '2.4-2', { - 'checksums': ['b6d056f75d45e315943a4618f5f62802612cd8931ba3f9f474b595140a3cfb93'], - }), - ('ff', '2.2-14.2', { - 'checksums': ['f8c06ac333ffe3545cdf56531619865176e1827728f7aeeba82d135d5c5e37e4'], - }), - ('bnlearn', '4.5', { - 'checksums': ['a8047625533260a855d309b3c0785cbeec0f9ec13f284b6664a1f61638138578'], - }), - ('bootstrap', '2019.6', { - 'checksums': ['5252fdfeb944cf1fae35016d35f9333b1bd1fc8c6d4a14e33901160e21968694'], - }), - ('combinat', '0.0-8', { - 'checksums': ['1513cf6b6ed74865bfdd9f8ca58feae12b62f38965d1a32c6130bef810ca30c1'], - }), - ('deal', '1.2-39', { - 'checksums': ['a349db8f1c86cbd8315c068da49314ce9eb585dbb50d2e5ff09300506bd8806b'], - }), - ('fdrtool', '1.2.15', { - 'checksums': ['65f964aa768d0703ceb7a199adc5e79ca79a6d29d7bc053a262eb533697686c0'], - }), - ('formatR', '1.7', { - 'checksums': ['a366621b3ff5f8e86a499b6f87858ad47eefdace138341b1377ecc307a5e5ddb'], - }), - ('gtools', '3.8.2', { - 'checksums': ['503ba60a41f3c61b8129c25de62c74dab29761d2e661d4addd106e2e02f1dcde'], - }), - ('gdata', '2.18.0', { - 'checksums': ['4b287f59f5bbf5fcbf18db16477852faac4a605b10c5284c46b93fa6e9918d7f'], - }), - ('GSA', '1.03.1', { - 'checksums': ['e192d4383f53680dbd556223ea5f8cad6bae62a80a337ba5fd8d05a8aee6a917'], - }), - ('highr', '0.8', { - 'checksums': ['4bd01fba995f68c947a99bdf9aca15327a5320151e10bd0326fad50a6d8bc657'], - }), - ('infotheo', '1.2.0', { - 'checksums': ['9b47ebc3db5708c88dc014b4ffec6734053a9c255a9241fcede30fec3e63aaa3'], - }), - ('lars', '1.2', { - 'checksums': ['64745b568f20b2cfdae3dad02fba92ebf78ffee466a71aaaafd4f48c3921922e'], - }), - ('lazy', '1.2-16', { - 'checksums': ['c796c8b987ed1bd9dfddd593e17312ed681fc4fa3a1ecfe51da2def0ac1e50df'], - }), - ('kernlab', '0.9-29', { - 'checksums': ['c3da693a0041dd34f869e7b63a8d8cf7d4bc588ac601bcdddcf7d44f68b3106f'], - }), - ('mime', '0.9', { - 'checksums': ['2ccf97d2940a09539dc051c7a9a1aee90ef04b34e9bc6c0b64b4435fb3c2fa80'], - }), - ('xfun', '0.13', { - 'checksums': ['a3da8d53b74ae58bb0f121177dcf3caf312c65fc181c18f168abd59afac33e0e'], - }), - ('markdown', '1.1', { - 'checksums': 
['8d8cd47472a37362e615dbb8865c3780d7b7db694d59050e19312f126e5efc1b'], - }), - ('mlbench', '2.1-1', { - 'checksums': ['748141d56531a39dc4d37cf0a5165a40b653a04c507e916854053ed77119e0e6'], - }), - ('NLP', '0.2-0', { - 'checksums': ['fc64c80124c4e53b20f92b60c68e2fd33ee189653d0ceea410c32dd66d9e7075'], - }), - ('mclust', '5.4.6', { - 'checksums': ['d4ffcf36bf709ad42dccb2387263f67ca32012b0707f0ef6eda32730b5c286fc'], - }), - ('RANN', '2.6.1', { - 'checksums': ['b299c3dfb7be17aa41e66eff5674fddd2992fb6dd3b10bc59ffbf0c401697182'], - }), - ('rmeta', '3.0', { - 'checksums': ['b9f9d405935cffcd7a5697ff13b033f9725de45f4dc7b059fd68a7536eb76b6e'], - }), - ('segmented', '1.1-0', { - 'checksums': ['d081d0efaec708d717bf1248ba3df099876389c22796aad676655efb706e9d19'], - }), - ('som', '0.3-5.1', { - 'checksums': ['a6f4c0e5b36656b7a8ea144b057e3d7642a8b71972da387a7133f3dd65507fb9'], - }), - ('SuppDists', '1.1-9.5', { - 'checksums': ['680b67145c07d44e200275e08e48602fe19cd99fb106c05422b3f4a244c071c4'], - }), - ('stabledist', '0.7-1', { - 'checksums': ['06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69'], - }), - ('survivalROC', '1.0.3', { - 'checksums': ['1449e7038e048e6ad4d3f7767983c0873c9c7a7637ffa03a4cc7f0e25c31cd72'], - }), - ('pspline', '1.0-18', { - 'checksums': ['f71cf293bd5462e510ac5ad16c4a96eda18891a0bfa6447dd881c65845e19ac7'], - }), - ('timeDate', '3043.102', { - 'checksums': ['377cba03cddab8c6992e31d0683c1db3a73afa9834eee3e95b3b0723f02d7473'], - }), - ('longmemo', '1.1-2', { - 'checksums': ['7964e982287427dd58f98e1144e468ae0cbd572d25a4bea6ca9ae9c7522f3207'], - }), - ('ADGofTest', '0.3', { - 'checksums': ['9cd9313954f6ecd82480d373f6c5371ca84ab33e3f5c39d972d35cfcf1096846'], - }), - ('MASS', '7.3-51.6', { - 'checksums': ['e2035c47d8428b52afc02dd4f87ccb39a0085629932dfaff5f0a5d2c84ef3eee'], - }), - ('pixmap', '0.4-11', { - 'checksums': ['6fa010749a59cdf56aad9f81271473b7d55697036203f2cd5d81372bcded7412'], - }), - ('lattice', '0.20-41', { - 'checksums': ['54ca557f0cb33df60eb10b883c2ed2847e061ddd57ed9b5dd7695149609d57b5'], - }), - ('sp', '1.4-1', { - 'checksums': ['8f96f1a4827eea2cc02bb35c418ce2bdaf2d5ea47a214bcd4bc85f928b417039'], - }), - ('ade4', '1.7-15', { - 'checksums': ['3286fa7d8c372a5596e82c970c058e4cca1139a09935f14b238ba38aa9fdfdf6'], - }), - ('AlgDesign', '1.2.0', { - 'checksums': ['ff86c9e19505770520e7614970ad19c698664d08001ce888b8603e44c2a3b52a'], - }), - ('base64enc', '0.1-3', { - 'checksums': ['6d856d8a364bcdc499a0bf38bfd283b7c743d08f0b288174fba7dbf0a04b688d'], - }), - ('BH', '1.72.0-3', { - 'checksums': ['888ec1a3316bb69e1ba749b08ba7e0903ebc4742e3a185de8d148c13cddac8ab'], - }), - ('brew', '1.0-6', { - 'checksums': ['d70d1a9a01cf4a923b4f11e4374ffd887ad3ff964f35c6f9dc0f29c8d657f0ed'], - }), - ('Brobdingnag', '1.2-6', { - 'checksums': ['19eccaed830ce9d93b70642f6f126ac66722a98bbd48586899cc613dd9966ad4'], - }), - ('corpcor', '1.6.9', { - 'checksums': ['2e4fabd1d3936fecea67fa365233590147ca50bb45cf80efb53a10345a8a23c2'], - }), - ('longitudinal', '1.1.12', { - 'checksums': ['d4f894c38373ba105b1bdc89e3e7c1b215838e2fb6b4470b9f23768b84e603b5'], - }), - ('backports', '1.1.6', { - 'checksums': ['d0e8af477514d81b46cc777e0fa532835c1dc7eecd3c2432bb40228131bc199c'], - }), - ('checkmate', '2.0.0', { - 'checksums': ['0dc25b0e20c04836359df1885d099c6e4ad8ae0e585a9e4107f7ea945d9c6fa4'], - }), - ('cubature', '2.0.4', { - 'checksums': ['d97ce5eaac5e43910208e8274ddf6ff4f974d05688f0247ebccd807e24c2fe4a'], - }), - ('DEoptimR', '1.0-8', { - 'checksums': 
['846911c1b2561a9fae73a8c60a21a5680963ebb0050af3c1f1147ae9a121e5ef'], - }), - ('digest', '0.6.25', { - 'checksums': ['15ccadb7b8bccaa221b6700bb549011719d0f4b38dbd3a1f29face3e019e2de5'], - }), - ('fastmatch', '1.1-0', { - 'checksums': ['20b51aa4838dbe829e11e951444a9c77257dcaf85130807508f6d7e76797007d'], - }), - ('ffbase', '0.12.8', { - 'checksums': ['18622f799641fb624dc274cdd31c52c9bd77c8f1f63fbb1dc636be80673b5356'], - }), - ('iterators', '1.0.12', { - 'checksums': ['96bf31d60ebd23aefae105d9b7790715e63327eec0deb2ddfb3d543994ea9f4b'], - }), - ('maps', '3.3.0', { - 'checksums': ['199afe19a4edcef966ae79ef802f5dcc15a022f9c357fcb8cae8925fe8bd2216'], - }), - ('nnls', '1.4', { - 'checksums': ['0e5d77abae12bc50639d34354f96a8e079408c9d7138a360743b73bd7bce6c1f'], - }), - ('sendmailR', '1.2-1', { - 'checksums': ['04feb08c6c763d9c58b2db24b1222febe01e28974eac4fe87670be6fb9bff17c'], - }), - ('dotCall64', '1.0-0', { - 'checksums': ['69318dc6b8aecc54d4f789c8105e672198363b395f1a764ebaeb54c0473d17ad'], - }), - ('spam', '2.5-1', { - 'checksums': ['d145881a0d48351ce88678a57862c0d0f716d98f3166f6338d954acacc51c067'], - }), - ('subplex', '1.6', { - 'checksums': ['0d05da1622fffcd20a01cc929fc6c2b7df40a8246e7018f7f1f3c175b774cbf9'], - }), - ('stringi', '1.4.6', { - 'checksums': ['633f67da5bd7bcb611764e4f478b0da050d22a715bbcbdd67aed0300dcef6fd6'], - }), - ('magrittr', '1.5', { - 'checksums': ['05c45943ada9443134caa0ab24db4a962b629f00b755ccf039a2a2a7b2c92ae8'], - }), - ('glue', '1.4.0', { - 'checksums': ['ea6c409f7141754baa090deba96cff270a11b185452cf9e6fb69cb148a9069c1'], - }), - ('stringr', '1.4.0', { - 'checksums': ['87604d2d3a9ad8fd68444ce0865b59e2ffbdb548a38d6634796bbd83eeb931dd'], - }), - ('evaluate', '0.14', { - 'checksums': ['a8c88bdbe4e60046d95ddf7e181ee15a6f41cdf92127c9678f6f3d328a3c5e28'], - }), - ('logspline', '2.1.15', { - 'checksums': ['dfe0c89a2ae219d121ea7af788dd994097f42d2ff39f4f86f5c4288a4ec0f71e'], - }), - ('ncbit', '2013.03.29', { - 'checksums': ['4480271f14953615c8ddc2e0666866bb1d0964398ba0fab6cc29046436820738'], - }), - ('permute', '0.9-5', { - 'checksums': ['d2885384a07497e8df273689d6713fc7c57a7c161f6935f3572015e16ab94865'], - }), - ('plotrix', '3.7-8', { - 'checksums': ['8ccd1f7e656413b9956cea614c986ce9cc61366deba356afb38cee6672a59480'], - }), - ('randomForest', '4.6-14', { - 'checksums': ['f4b88920419eb0a89d0bc5744af0416d92d112988702dc726882394128a8754d'], - }), - ('scatterplot3d', '0.3-41', { - 'checksums': ['4c8326b70a3b2d37126ca806771d71e5e9fe1201cfbe5b0d5a0a83c3d2c75d94'], - }), - ('SparseM', '1.78', { - 'checksums': ['d6b79ec881a10c91cb03dc23e6e783080ded9db4f2cb723755aa0d7d29a8b432'], - }), - ('tripack', '1.3-9', { - 'checksums': ['2b40670c23894b12e86a36fb2f42cab4728c8af8bd5338e94fbf86b7916a8c10'], - }), - ('R6', '2.4.1', { - 'checksums': ['26b0fd64827655c28c903f7ff623e839447387f3ad9b04939a02f41ac82faa3e'], - }), - ('irace', '3.4.1', { - 'checksums': ['7eea92ba42e6ba320fa8bdca3c53091ae42f26a0f097244f65e7e117f6d514b6'], - }), - ('rJava', '0.9-12', { - 'checksums': ['2248a8c73cacfecf75445ad0ebda4960409ec3f21afb180a1bc02a6de4057b0f'], - }), - ('RColorBrewer', '1.1-2', { - 'checksums': ['f3e9781e84e114b7a88eb099825936cc5ae7276bbba5af94d35adb1b3ea2ccdd'], - }), - ('png', '0.1-7', { - 'checksums': ['e269ff968f04384fc9421d17cfc7c10cf7756b11c2d6d126e9776f5aca65553c'], - }), - ('jpeg', '0.1-8.1', { - 'checksums': ['1db0a4976fd9b2ae27a37d3e856cca35bc2909323c7a40724846a5d3c18915a9'], - }), - ('latticeExtra', '0.6-29', { - 'checksums': 
['6cadc31d56f73d926e2e8d72e43ae17ac03607a4d1a374719999a4a231e3df11'], - }), - ('Matrix', '1.2-18', { - 'checksums': ['f7ff018c2811946767ffd4c96d3987e859b82786ff72e1c211ab18bc03cb6119'], - }), - ('RcppArmadillo', '0.9.870.2.0', { - 'checksums': ['bb605d6702d49d447b18986d9a7864b27e5512630a6713bca67afbb5bb54f1e4'], - }), - ('plyr', '1.8.6', { - 'checksums': ['ea55d26f155443e9774769531daa5d4c20a0697bb53abd832e891b126c935287'], - }), - ('gtable', '0.3.0', { - 'checksums': ['fd386cc4610b1cc7627dac34dba8367f7efe114b968503027fb2e1265c67d6d3'], - }), - ('reshape2', '1.4.4', { - 'checksums': ['d88dcf9e2530fa9695fc57d0c78adfc5e361305fe8919fe09410b17da5ca12d8'], - }), - ('dichromat', '2.0-0', { - 'checksums': ['31151eaf36f70bdc1172da5ff5088ee51cc0a3db4ead59c7c38c25316d580dd1'], - }), - ('colorspace', '1.4-1', { - 'checksums': ['693d713a050f8bfecdb7322739f04b40d99b55aed168803686e43401d5f0d673'], - }), - ('munsell', '0.5.0', { - 'checksums': ['d0f3a9fb30e2b5d411fa61db56d4be5733a2621c0edf017d090bdfa5e377e199'], - }), - ('labeling', '0.3', { - 'checksums': ['0d8069eb48e91f6f6d6a9148f4e2dc5026cabead15dd15fc343eff9cf33f538f'], - }), - ('viridisLite', '0.3.0', { - 'checksums': ['780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af'], - }), - ('farver', '2.0.3', { - 'checksums': ['0e1590df79ec6078f10426411b96216b70568a4eaf3ffd84ca723add0ed8e5cc'], - }), - ('rlang', '0.4.5', { - 'checksums': ['cd1fac76f1a6ac26e07cc11dd08c55947fe152e4703daf8c94e3a650721b10a8'], - }), - ('lifecycle', '0.2.0', { - 'checksums': ['29746e8dee05d4e36f9c612e8c7a903a4f648a36b3b94c9776e518c38a412224'], - }), - ('scales', '1.1.0', { - 'checksums': ['1ee4a6fd1dbc5f52fe57dd8cce8caee4ce2fecb02d4e7d519e83f15aa45b2d03'], - }), - ('assertthat', '0.2.1', { - 'checksums': ['85cf7fcc4753a8c86da9a6f454e46c2a58ffc70c4f47cac4d3e3bcefda2a9e9f'], - }), - ('crayon', '1.3.4', { - 'checksums': ['fc6e9bf990e9532c4fcf1a3d2ce22d8cf12d25a95e4779adfa17713ed836fa68'], - }), - ('fansi', '0.4.1', { - 'checksums': ['3c69eec803a3827e5227f9cf084976eeb738b22c7eb7665bb5faa251bce41e09'], - }), - ('cli', '2.0.2', { - 'checksums': ['490834e5b80eb036befa0e150996bcab1c4d5d168c3d45209926e52d0d5413b6'], - }), - ('utf8', '1.1.4', { - 'checksums': ['f6da9cadfc683057d45f54b43312a359cf96ec2731c0dda18a8eae31d1e31e54'], - }), - ('zeallot', '0.1.0', { - 'checksums': ['439f1213c97c8ddef9a1e1499bdf81c2940859f78b76bc86ba476cebd88ba1e9'], - }), - ('ellipsis', '0.3.0', { - 'checksums': ['0bf814cb7a1f0ee1f2949bdc98752a0d535f2a9489280dd4d8fcdb10067ee907'], - }), - ('vctrs', '0.2.4', { - 'checksums': ['dcc8b6bfd2d951d48d338a3d4deaaabfee356c0ee43169a6d6b06ea78cfe4f97'], - }), - ('pillar', '1.4.3', { - 'checksums': ['5a8bc40bd836baab80f70fba268e3f530fb464a9268ad99f1c037380f83f560b'], - }), - ('pkgconfig', '2.0.3', { - 'checksums': ['330fef440ffeb842a7dcfffc8303743f1feae83e8d6131078b5a44ff11bc3850'], - }), - ('tibble', '3.0.1', { - 'checksums': ['154552cfb767e0bd48ef3fc61df8286ce52205e970815c5bc2560f41eceea79e'], - }), - ('lazyeval', '0.2.2', { - 'checksums': ['d6904112a21056222cfcd5eb8175a78aa063afe648a562d9c42c6b960a8820d4'], - }), - ('withr', '2.2.0', { - 'checksums': ['4c21e51cf48f8c281ddd5f5ec358ac446df3c982104fd00bfe62d9259d73b582'], - }), - ('nlme', '3.1-147', { - 'checksums': ['efc8d82e5758c4696458f647f2b8bb1776fdbff6c8eaad3fe3c0573a3c1f0ce7'], - }), - ('mgcv', '1.8-31', { - 'checksums': ['736de462a0ac43a6ed38cd57dfb0ba2942c941dfbb538128782727ab7125c3c5'], - }), - ('rprojroot', '1.3-2', { - 'checksums': ['df5665834941d8b0e377a8810a04f98552201678300f168de5f58a587b73238b'], 
- }), - ('desc', '1.2.0', { - 'checksums': ['e66fb5d4fc7974bc558abcdc107a1f258c9177a29dcfcf9164bc6b33dd08dae8'], - }), - ('ps', '1.3.2', { - 'checksums': ['89f2456af6c7ffbc151aeccb620584ca26015b7d6188eb188488c7c4afc14704'], - }), - ('processx', '3.4.2', { - 'checksums': ['94a0ffc632759be85d13b7b11ed006adf6c08c2d9cd99612cd0372833bd75c09'], - }), - ('callr', '3.4.3', { - 'checksums': ['01b7277f20c1d662c6bebbfa2798d179922b36d4148b4298853579aeda0382b5'], - }), - ('prettyunits', '1.1.1', { - 'checksums': ['9a199aa80c6d5e50fa977bc724d6e39dae1fc597a96413053609156ee7fb75c5'], - }), - ('pkgbuild', '1.0.7', { - 'checksums': ['29bb38a38202ba780d2d46aeca0a6e2f052653e4a83891ec38d19bebd131a971'], - }), - ('rstudioapi', '0.13', { - 'checksums': ['aac35bbdcb4a8e8caba943bc8a2b98120e8940b80cd1020224bb1a26ff776d8b'], - }), - ('pkgload', '1.0.2', { - 'checksums': ['3186564e690fb05eabe76e1ac0bfd4312562c3ac8794b29f8850399515dcf27c'], - }), - ('praise', '1.0.0', { - 'checksums': ['5c035e74fd05dfa59b03afe0d5f4c53fbf34144e175e90c53d09c6baedf5debd'], - }), - ('testthat', '2.3.2', { - 'checksums': ['1a268d8df07f7cd8d282d03bb96ac2d96a24a95c9aa52f4cca5138a09dd8e06c'], - }), - ('isoband', '0.2.1', { - 'checksums': ['18883606bea8352e04a4618bea4e5c9833269e73a46b50bc006dddf4c8b6b4d9'], - }), - ('ggplot2', '3.3.0', { - 'checksums': ['320e3c76fe0d0397e29f4782bf85af3647518154b3900a39fd18cf024c554148'], - }), - ('pROC', '1.16.2', { - 'checksums': ['b68b960ed9a2cdea7976943649082c3945e370d14115b7adbce440fc7f51fc2a'], - }), - ('quadprog', '1.5-8', { - 'checksums': ['22128dd6b08d3516c44ff89276719ad4fe46b36b23fdd585274fa3a93e7a49cd'], - }), - ('BB', '2019.10-1', { - 'checksums': ['04d0b6ce6e5f070b109478a6005653dbe78613bb4e3ea4903203d851b5d3c94d'], - }), - ('BBmisc', '1.11', { - 'checksums': ['1ea48c281825349d8642a661bb447e23bfd651db3599bf72593bfebe17b101d2'], - }), - ('fail', '1.3', { - 'checksums': ['ede8aa2a9f2371aff5874cd030ac625adb35c33954835b54ab4abf7aeb34d56d'], - }), - ('rlecuyer', '0.3-5', { - 'checksums': ['4723434ff7624d4f404a6854ffa0673fc43daa46f58f064dbeeaa17da28ab626'], - }), - ('snow', '0.4-3', { - 'checksums': ['8512537daf334ea2b8074dbb80cf5e959a403a78d68bc1e97664e8a4f64576d8'], - }), - ('tree', '1.0-40', { - 'checksums': ['ffab16382d7ed5b76529801ab26b4970363b2072231c6a87330326298ce626e7'], - }), - ('pls', '2.7-2', { - 'checksums': ['67e91e36dbebeb2f2d9c9b88f310dc00f70de275e5f382f392e72dd36af42b88'], - }), - ('class', '7.3-17', { - 'checksums': ['be1f85b6df7556db93f50cb08106aac6620d4b5bb3fee846422863a022461313'], - }), - ('e1071', '1.7-3', { - 'checksums': ['bb2dba526b673ec3a573befe365e3500b773593f0384fd6694e0835496bcc25d'], - }), - ('nnet', '7.3-14', { - 'checksums': ['5d1b9e9764d74d16c651f18f949aa4e9e2995ba64633cbfa2c6a7355ae30f4af'], - }), - ('minqa', '1.2.4', { - 'checksums': ['cfa193a4a9c55cb08f3faf4ab09c11b70412523767f19894e4eafc6e94cccd0c'], - }), - ('RcppEigen', '0.3.3.7.0', { - 'checksums': ['62ea627284425bfdb56613bc315cca492ed3483a56a03c1f9dc9821a25c3e8ac'], - }), - ('MatrixModels', '0.4-1', { - 'checksums': ['fe878e401e697992a480cd146421c3a10fa331f6b37a51bac83b5c1119dcce33'], - }), - ('quantreg', '5.55', { - 'checksums': ['cbe1541409aed8222a41043958ab9c352b84dba4e0766b54bf1eac59d2454cfe'], - }), - ('robustbase', '0.93-6', { - 'checksums': ['ea1463a646a0aad0cc6f48e011c8baf990178f1228e0759be63259123b3a24b3'], - }), - ('zoo', '1.8-7', { - 'checksums': ['9e072ddc8f245adcdeb230cbf0c818db9c028c320894f48211758da2bf2085f0'], - }), - ('lmtest', '0.9-37', { - 'checksums': 
['ddc929f94bf055974832fa4a20fdd0c1eb3a84ee11f716c287936f2141d5ca0a'], - }), - ('vcd', '1.4-7', { - 'checksums': ['ec89b2ad202b89d70344b49d9410ddc8a15dced00462ed7e6f9b516811325299'], - }), - ('snowfall', '1.84-6.1', { - 'checksums': ['5c446df3a931e522a8b138cf1fb7ca5815cc82fcf486dbac964dcbc0690e248d'], - }), - ('rpart', '4.1-15', { - 'checksums': ['2b8ebe0e9e11592debff893f93f5a44a6765abd0bd956b0eb1f70e9394cfae5c'], - }), - ('survival', '3.1-12', { - 'checksums': ['b62ed66eb646f3df13f7e9bf6571e3bfecae128c66491e174c8833cbef1bf21f'], - }), - ('bindr', '0.1.1', { - 'checksums': ['7c785ca77ceb3ab9282148bcecf64d1857d35f5b800531d49483622fe67505d0'], - }), - ('plogr', '0.2.0', { - 'checksums': ['0e63ba2e1f624005fe25c67cdd403636a912e063d682eca07f2f1d65e9870d29'], - }), - ('bindrcpp', '0.2.2', { - 'checksums': ['48130709eba9d133679a0e959e49a7b14acbce4f47c1e15c4ab46bd9e48ae467'], - }), - ('purrr', '0.3.4', { - 'checksums': ['23ebc93bc9aed9e7575e8eb9683ff4acc0270ef7d6436cc2ef4236a9734840b2'], - }), - ('tidyselect', '1.0.0', { - 'checksums': ['fe761766d03af86d04da9a9a7800e9c8271d2cb067776cfb817d853725d59caf'], - }), - ('dplyr', '0.8.5', { - 'checksums': ['5750d3bf4bda7b5448e08af264ed183b4f7bd0c59a9d828fe9dd399b14590218'], - }), - ('tidyr', '1.0.2', { - 'checksums': ['2403dd2f4d350d1ecb449be5d61cdccf6655572a807409325f9122c716924e69'], - }), - ('mnormt', '1.5-6', { - 'checksums': ['2951fcc9711d577d7d5577d313875c4ed9f7a7a06df8c3b62fa27c231f021e4d'], - }), - ('foreign', '0.8-79', { - 'checksums': ['af36b6945afbf849543ef7e432e27fecb66d5a8b3932b2428e59c2392699b9b4'], - }), - ('psych', '1.9.12.31', { - 'checksums': ['25e71dbe0b0d8211e7a9c8439c1c205a25fd571a0f95a89b8425b87b95b9290a'], - }), - ('generics', '0.0.2', { - 'checksums': ['71b3d1b719ce89e71dd396ac8bc6aa5f1cd99bbbf03faff61dfbbee32fec6176'], - }), - ('broom', '0.5.6', { - 'checksums': ['0b06f670ace7349b95a71313c360dbded2428d65d17eaacccc93750196964335'], - }), - ('nloptr', '1.2.2.1', { - 'checksums': ['d037bea484725cf6cbc069eee17db17f1bc8fc4edc1f1ca16cf6c34bc21610ae'], - }), - ('boot', '1.3-25', { - 'checksums': ['464835fcb453072346ce49e4ae318e04c9dba682349be49db616623b6088fbbe'], - }), - ('statmod', '1.4.34', { - 'checksums': ['1a81c286e099d2395e39f47f1e87295b8e1d64b64ec55bb09bc817ae8879747a'], - }), - ('lme4', '1.1-23', { - 'checksums': ['99d542b1f78fae33a64f1b8eec33b7a4532a8d82d2ac47bdb2838248f14c0262'], - }), - ('ucminf', '1.1-4', { - 'checksums': ['a2eb382f9b24e949d982e311578518710f8242070b3aa3314a331c1e1e7f6f07'], - }), - ('numDeriv', '2016.8-1.1', { - 'checksums': ['d8c4d19ff9aeb31b0c628bd4a16378e51c1c9a3813b525469a31fe89af00b345'], - }), - ('ordinal', '2019.12-10', { - 'checksums': ['7a41e7b7e852a8fa3e911f8859d36e5709ccec5ca42ee3de14a813b7aaac7725'], - }), - ('jomo', '2.6-10', { - 'checksums': ['4063d48e259e936dc0bd9dc616a09043f695703848cb1bf8faa08c07922034cd'], - }), - ('hms', '0.5.3', { - 'checksums': ['4e2b67c8cf65fe86179f24f42d82b3ca9377d5907837bda98b4fc6c2318853ad'], - }), - ('clipr', '0.7.0', { - 'checksums': ['03a4e4b72ec63bd08b53fe62673ffc19a004cc846957a335be2b30d046b8c2e2'], - }), - ('readr', '1.3.1', { - 'checksums': ['33f94de39bb7f2a342fbb2bd4e5afcfec08798eac39672ee18042ac0b349e4f3'], - }), - ('forcats', '0.5.0', { - 'checksums': ['8f960e789333ec597ddf2d653a64e330f03b86f465e9b71f6779f227355d90c4'], - }), - ('haven', '2.2.0', { - 'checksums': ['199ee9b14e1ff70a0b0c3b9ce33dfdec8ed3b5e857a2a36bfb82e78a7b352d3d'], - }), - ('pan', '1.6', { - 'checksums': ['adc0df816ae38bc188bce0aef3aeb71d19c0fc26e063107eeee71a81a49463b6'], - }), - ('mitml', 
'0.3-7', { - 'checksums': ['c6f796d0059f1b093b599a89d955982fa257de9c45763ecc2cbbce10fdec1e7b'], - }), - ('mice', '3.8.0', { - 'checksums': ['04bc18d6cf225d626d4a5d52dd98a30a19662ae14263c83b51744efce25e7ec5'], - }), - ('urca', '1.3-0', { - 'checksums': ['621cc82398e25b58b4a16edf000ed0a1484d9a0bc458f734e97b6f371cc76aaa'], - }), - ('fracdiff', '1.5-1', { - 'checksums': ['b8103b32a4ca3a59dda1624c07da08ecd144c7a91a747d1f4663e99421950eb6'], - }), - ('logistf', '1.23', { - 'checksums': ['5adb22a40569883395dc048c877f849dd08d07582a991f1b160f0338f0b13838'], - }), - ('akima', '0.6-2', { - 'checksums': ['61da3e556553eea6d1f8db7c92218254441da31e365bdef82dfe5da188cc97ce'], - }), - ('bitops', '1.0-6', { - 'checksums': ['9b731397b7166dd54941fb0d2eac6df60c7a483b2e790f7eb15b4d7b79c9d69c'], - }), - ('mixtools', '1.2.0', { - 'checksums': ['ef033ef13625209065d26767bf70d129972e6808927f755629f1d70a118b9023'], - }), - ('cluster', '2.1.0', { - 'checksums': ['eaf955bef8f616ea563351ec7f597c445aec43e65991ca975e382ef1fd70aa14'], - }), - ('gclus', '1.3.2', { - 'checksums': ['9cc61cdff206c11213e73afca3d570a7234250cf6044a9202c2589932278e0b3'], - }), - ('coda', '0.19-3', { - 'checksums': ['d3df1fc848bcf1af8fae13d61eeab60e99a3d4b4db384bec4326f909f502c5d6'], - }), - ('codetools', '0.2-16', { - 'checksums': ['f67a66175cb5d8882457d1e9b91ea2f16813d554fa74f80c1fd6e17cf1877501'], - }), - ('foreach', '1.5.0', { - 'checksums': ['1af9a713418c4cdeb49c4194f6482a7ee8ae4959b995a958a8a8a19ec8b60415'], - }), - ('doMC', '1.3.6', { - 'checksums': ['2977fc9e2dc54d85d45b4a36cd286dff72834fbc73f38b6ee45a6eb8557fc9b2'], - }), - ('DBI', '1.1.0', { - 'checksums': ['a96db7fa39a58f1ed34c6e78d8f5f7e4cf0882afb301323b5c6975d6729203e4'], - }), - ('gam', '1.16.1', { - 'checksums': ['80d04102c6152143e8ed364f91eb312e413f73b8fcab7cf15d677867a16e74b9'], - }), - ('gamlss.data', '5.1-4', { - 'checksums': ['0d3777d8c3cd76cef273aa6bde40a91688719be401195ed9bfd1e85bd7d5eeb5'], - }), - ('gamlss.dist', '5.1-6', { - 'checksums': ['3ff0e36dfd7ddea109410c539375a408af8f8f865dd8865555e41fb0402720dd'], - }), - ('gamlss', '5.1-6', { - 'checksums': ['b9c9a21343ed7777c239d8c5ad63b6f6efa0254bfcd6eaf66a74c319268b6799'], - }), - ('gamlss.tr', '5.1-0', { - 'checksums': ['f9e1c4935d8876bfc80dddc0a9bc2c82b4deeda9482df208297a84a638a4a9df'], - }), - ('hwriter', '1.3.2', { - 'checksums': ['6b3531d2e7a239be9d6e3a1aa3256b2745eb68aa0bdffd2076d36552d0d7322b'], - }), - ('KernSmooth', '2.23-17', { - 'checksums': ['2b3d73fe15db46dbc2f6e3b043baadb7633c46bfa4a66d9eea5aed633058e685'], - }), - ('xts', '0.12-0', { - 'checksums': ['df11e6dad7cf0a266702988fa6127aaf72437da743ca40e9abcd9e6b3d628c60'], - }), - ('curl', '4.3', { - 'checksums': ['7406d485bb50a6190e3ed201e3489063fd249b8b3b1b4f049167ac405a352edb'], - }), - ('TTR', '0.23-6', { - 'checksums': ['afc10a89d3a18f121ddf0f7256408eeb05cc64e18ee94e654bfa803e5415e265'], - }), - ('quantmod', '0.4.17', { - 'checksums': ['0aff56f276f8e347c56356060f7320913f0e417f1c5411c49f0865ca732044eb'], - }), - ('mvtnorm', '1.1-0', { - 'checksums': ['8112e12eb11f5db2ff145893f48426520e669be99b87889457dd2c4f2636cb5d'], - }), - ('pcaPP', '1.9-73', { - 'checksums': ['ca4566b0babfbe83ef9418283b08a12b3420dc362f93c6562f265df7926b53fc'], - }), - ('SQUAREM', '2020.2', { - 'checksums': ['6e3373bb5190ade222d676dae9f1aad32feddd50e97499fab7d66fd94752dac8'], - }), - ('lava', '1.6.7', { - 'checksums': ['63f7a8454cfc70739877812481a987deea33d4235f05234d0dd0ed8bd6eadf39'], - }), - ('prodlim', '2019.11.13', { - 'checksums': 
['6809924f503a14681de84730489cdaf9240d7951c64f5b98ca37dc1ce7809b0f'], - }), - ('pscl', '1.5.5', { - 'checksums': ['054c9b88a991abdec3338688f58e81b6ba55f91edb988621864b24fd152fee6f'], - }), - ('memoise', '1.1.0', { - 'checksums': ['b276f9452a26aeb79e12dd7227fcc8712832781a42f92d70e86040da0573980c'], - }), - ('bit64', '0.9-7', { - 'checksums': ['7b9aaa7f971198728c3629f9ba1a1b24d53db5c7e459498b0fdf86bbd3dff61f'], - }), - ('blob', '1.2.1', { - 'checksums': ['ef54bc7a9646c1b73f4d2f60c869b4f1940bc3505874175114297ad7772d8bea'], - }), - ('RSQLite', '2.2.0', { - 'checksums': ['000d126fda069cd97d1a9f9df16cd267ca76d8b96c290ca9b8c32d9e91d468d4'], - }), - ('data.table', '1.12.8', { - 'checksums': ['d3a75f3a355ff144cc20a476041617e21fcf2a9f79265fd9bbd4693f3671f9dc'], - }), - ('BatchJobs', '1.8', { - 'checksums': ['35cc2dae31994b1df982d11939509ce965e12578418c4fbb8cd7a422afd6e4ff'], - }), - ('sandwich', '2.5-1', { - 'checksums': ['dbef6f4d12b83e166f9a2508b7c732b04493641685d6758d29f3609e564166d6'], - }), - ('sfsmisc', '1.1-6', { - 'checksums': ['57b22cdd713e71e9235ff1ace8cdf73564bfdcee4b018f3d7cde6fb35493db11'], - }), - ('spatial', '7.3-12', { - 'checksums': ['7639039ee7407bd088e1b253376b2cb4fcdf4cc9124d6b48e4119d5cda872d63'], - }), - ('VGAM', '1.1-3', { - 'checksums': ['0c9ff51b9ee76d8b182a19b61f278970ad6d421c0206bfef40b7413b7acb94c3'], - }), - ('waveslim', '1.8.2', { - 'checksums': ['133c4f7a027282742fe99b583ca65f178fc7a3df2ce75cb4d60650f0a1dd7145'], - }), - ('xtable', '1.8-4', { - 'checksums': ['5abec0e8c27865ef0880f1d19c9f9ca7cc0fd24eadaa72bcd270c3fb4075fd1c'], - }), - ('profileModel', '0.6.0', { - 'checksums': ['a829ceec29c817d6d15947b818e28f9cf5a188a231b9b5d0a75018388887087b'], - }), - ('brglm', '0.6.2', { - 'checksums': ['c2af432a43ccf37e9de50317f770b9703a4c80b4ef79ec40aa8e7ec3987e3631'], - }), - ('deSolve', '1.28', { - 'checksums': ['4c55ef4cae841df91034382d277b483985af120240f87af587ff82177fdb5a49'], - }), - ('tseriesChaos', '0.1-13.1', { - 'checksums': ['23cb5fea56409a305e02a523ff8b7642ec383942d415c9cffdc92208dacfd961'], - }), - ('tseries', '0.10-47', { - 'checksums': ['202377df56806fe611c2e12c4d9732c71b71220726e2defa7e568d2b5b62fb7b'], - }), - ('fastICA', '1.2-2', { - 'checksums': ['32223593374102bf54c8fdca7b57231e4f4d0dd0be02d9f3500ad41b1996f1fe'], - }), - ('R.methodsS3', '1.8.0', { - 'checksums': ['e005f5ee21bfb6fbbf415de957a9ca0ed6e9f2800b95d98d76a9acb3c14185a5'], - }), - ('R.oo', '1.23.0', { - 'checksums': ['f5124ce3dbb0a62e8ef1bfce2de2d1dc2f776e8c48fd8cac358f7f5feb592ea1'], - }), - ('jsonlite', '1.6.1', { - 'checksums': ['74921dd249857a23afabc1ad1485a63a48828e57f240f0619deb04c60f883377'], - }), - ('sys', '3.3', { - 'checksums': ['a6217c2a7240ed68614006f392c6d062247dab8b9b0d498f95e947110df19b93'], - }), - ('askpass', '1.1', { - 'checksums': ['db40827d1bdbb90c0aa2846a2961d3bf9d76ad1b392302f9dd84cc2fd18c001f'], - }), - ('openssl', '1.4.1', { - 'checksums': ['f7fbecc75254fc43297a95a4338c674ab9ba2ec056b59e027d16d23122161fc6'], - }), - ('httr', '1.4.1', { - 'checksums': ['675c7e07bbe82c48284ee1ab929bb14a6e653abae2860d854dc41a3c028de156'], - }), - ('cgdsr', '1.3.0', { - 'checksums': ['4aa2a3564cee2449c3ff39ab2ad631deb165d4c78b8107e0ff77a9095340cc1f'], - }), - ('R.utils', '2.9.2', { - 'checksums': ['ac6b3b8e814fbb855c38fbdb89a4f0cf0ed65ce7fa308445bd74107fbc0d32cf'], - }), - ('R.matlab', '3.6.2', { - 'checksums': ['1ba338f470a24b7f6ef68cadbd04eb468ead4a689f263d2642408ad591b786bb'], - }), - ('gridExtra', '2.3', { - 'checksums': ['81b60ce6f237ec308555471ae0119158b115463df696d2eca9b177ded8988e3b'], - }), 
- ('gbm', '2.1.5', { - 'checksums': ['06fbde10639dfa886554379b40a7402d1f1236a9152eca517e97738895a4466f'], - }), - ('Formula', '1.2-3', { - 'checksums': ['1411349b20bd09611a9fd0ee6d15f780c758ad2b0e490e908facb49433823872'], - }), - ('acepack', '1.4.1', { - 'checksums': ['82750507926f02a696f6cc03693e8d4a5ee7e92500c8c15a16a9c12addcd28b9'], - }), - ('proto', '1.0.0', { - 'checksums': ['9294d9a3b2b680bb6fac17000bfc97453d77c87ef68cfd609b4c4eb6d11d04d1'], - }), - ('chron', '2.3-55', { - 'checksums': ['0f731fb9e79818cd95b5fa843cc233616a5f8e5dd39a1ae8048f5a1fd8d1eb25'], - }), - ('viridis', '0.5.1', { - 'checksums': ['ddf267515838c6eb092938133035cee62ab6a78760413bfc28b8256165701918'], - }), - ('yaml', '2.2.1', { - 'checksums': ['1115b7bc2a397fa724956eec916df5160c600c99a3be186d21558dd38d782783'], - }), - ('htmltools', '0.4.0', { - 'checksums': ['5b18552e1183b1b90b5cca8e7f95b57e8124c9d517b22aa64783b829513b811a'], - }), - ('htmlwidgets', '1.5.1', { - 'checksums': ['d42e59144552d9b4131f11ddd6169dfb9bd538c7996669a09acbdb400d18d781'], - }), - ('knitr', '1.28', { - 'checksums': ['05ee01da31d715bf24793efb3e4ef3bb3101ef1e1ab2d760c645fc5b9d40232a'], - }), - ('htmlTable', '1.13.3', { - 'checksums': ['d459c43675f6ee0a1ae8232ea8819b2a842e795a833b28127081fa344d09393d'], - }), - ('Hmisc', '4.4-0', { - 'checksums': ['f16ecf4c5ee2202d51f426282a54f8000ffa8b9747c3e910205f34f878556ec7'], - }), - ('fastcluster', '1.1.25', { - 'checksums': ['f3661def975802f3dd3cec5b2a1379f3707eacff945cf448e33aec0da1ed4205'], - }), - ('registry', '0.5-1', { - 'checksums': ['dfea36edb0a703ec57e111016789b47a1ba21d9c8ff30672555c81327a3372cc'], - }), - ('bibtex', '0.4.2.2', { - 'checksums': ['073887668f16568d9fafaa5862ed7d3d866f40cbc1a028371b038cdbbe9c1090'], - }), - ('pkgmaker', '0.31.1', { - 'checksums': ['1702b8e2fa9751fa67c3031468273eaa28358d27ba2df98a4fbb08df80500f64'], - }), - ('rngtools', '1.5', { - 'checksums': ['8274873b73f7acbe0ce007e62893bf4d369d2aab8768754a60da46b3f078f575'], - }), - ('doParallel', '1.0.15', { - 'checksums': ['71ad7ea69616468996aefdd8d02a4a234759a21ddde9ed1657e3c537145cd86e'], - }), - ('gridBase', '0.4-7', { - 'checksums': ['be8718d24cd10f6e323dce91b15fc40ed88bccaa26acf3192d5e38fe33e15f26'], - }), - ('irlba', '2.3.3', { - 'checksums': ['6ee233697bcd579813bd0af5e1f4e6dd1eea971e8919c748408130d970fef5c0'], - }), - ('igraph', '1.2.5', { - 'checksums': ['0cdd675b2e6a31f54bd5ba4530a26f00996eb310ceea93263c6fc4ba9e0fdf88'], - }), - ('GeneNet', '1.2.14', { - 'checksums': ['76f4d1a5954b3060d95017b0108b2f0936fdf38c15e5c1fd051cfc5c82ccb031'], - }), - ('ape', '5.3', { - 'checksums': ['08b0df134c523feb00a86896d1aa2a43f0f0dab20a53bc6b5d6268d867988b23'], - }), - ('RJSONIO', '1.3-1.4', { - 'checksums': ['54142c931e15eca278a02dad5734026bb49d960471eb085008af825352953190'], - }), - ('caTools', '1.18.0', { - 'checksums': ['0343698a41e8b516769af0433ac2e52a7df9be709b7f78c1825e88e1a37f3378'], - }), - ('gplots', '3.0.3', { - 'checksums': ['d776d3ee9e284085f6ec1b7717afcd5c4addad60d2f1f4f220cda788c8ac4643'], - }), - ('ROCR', '1.0-7', { - 'checksums': ['e7ef710f847e441a48b20fdc781dbc1377f5a060a5ee635234053f7a2a435ec9'], - }), - ('later', '1.0.0', { - 'checksums': ['277b9848ef2e5e1ac7257aefeb58f6b20cca17693460e7c4eee0477de456b287'], - }), - ('promises', '1.1.0', { - 'checksums': ['c8ea0f3e3256cf3010439b3a6111966db419c3dcff9a561e73caf8bd65f38006'], - }), - ('httpuv', '1.5.2', { - 'checksums': ['93b32be974e0f531a3cb343685165c0caadf30cfea07683f8d69302a34045d8d'], - }), - ('rjson', '0.2.20', { - 'checksums': 
['3a287c1e5ee7c333ed8385913c0a307daf99335fbdf803e9dcca6e3d5adb3f6c'], - }), - ('sourcetools', '0.1.7', { - 'checksums': ['47984406efb3b3face133979ccbae9fefb7360b9a6ca1a1c11473681418ed2ca'], - }), - ('fastmap', '1.0.1', { - 'checksums': ['4778b05dfebd356f8df980dfeff3b973a72bca14898f870e5c40c1d84db9faec'], - }), - ('shiny', '1.4.0.2', { - 'checksums': ['dca6ac83d03266a3d930273e7b821afa4a574f02ef89f963672972c2a2f5e064'], - }), - ('seqinr', '3.6-1', { - 'checksums': ['c44fc8922ef410da3c3b5ca117cdbec55ccb546c9e6d96c01ede44398dfa6048'], - }), - ('LearnBayes', '2.15.1', { - 'checksums': ['9b110858456523ca0b2a63f22013c4e1fbda6674b9d84dc1f4de8bffc5260532'], - }), - ('deldir', '0.1-25', { - 'checksums': ['f0a2f2eb511e8e99423a8f9b6ebc9073967d79629db4c86824eb0696d1a6af4d'], - }), - ('gmodels', '2.18.1', { - 'checksums': ['626140a34eb8c53dd0a06511a76c71bc61c48777fa76fcc5e6934c9c276a1369'], - }), - ('expm', '0.999-4', { - 'checksums': ['58d06427a08c9442462b00a5531e2575800be13ed450c5a1546261251e536096'], - }), - ('raster', '3.1-5', { - 'checksums': ['db6622d55bb9e5c4a8d8e59887a802b35fc07dcee946800453bc5e1901c01a04'], - }), - ('spData', '0.3.5', { - 'checksums': ['901e840ba42e945d51ea0dfe815fece44dd92a8e74a2356345ccbb2577908926'], - }), - ('units', '0.6-6', { - 'checksums': ['d0b6c76afb9aa5d7a0eaae05e6fc1bd2bb9d62d4c43e986b4782d6e5c2efa687'], - }), - ('classInt', '0.4-3', { - 'checksums': ['9ede7a2a7a6b6c114919a3315a884fb592e33b037a50a4fe45cbd4fe2fc434ac'], - }), - ('vegan', '2.5-6', { - 'checksums': ['b3c00aceb3db38101960515658e2b9ec1552439c3ed4e26e72989f18eccbc03c'], - }), - ('progress', '1.2.2', { - 'checksums': ['b4a4d8ed55db99394b036a29a0fb20b5dd2a91c211a1d651c52a1023cc58ff35'], - }), - ('rncl', '0.8.4', { - 'checksums': ['6b19d0dd9bb08ecf99766be5ad684bcd1894d1cd9291230bdd709dbd3396496b'], - }), - ('XML', '3.99-0.3', { - 'checksums': ['81b7a76308f3b7378dff525eff0180bba73b31117483a26cc3aa172d15c7f753'], - }), - ('tinytex', '0.22', { - 'checksums': ['6bbcbc907cad14bc0a583670bad1d9648d1f1cedd364354042aee83bb6302e69'], - }), - ('rmarkdown', '2.1', { - 'checksums': ['ef450e21206c454aa78eeca9023bbc78d1b2939e0b4bed9fdec9f2bf81ee455d'], - }), - ('reshape', '0.8.8', { - 'checksums': ['4d5597fde8511e8fe4e4d1fd7adfc7ab37ff41ac68c76a746f7487d7b106d168'], - }), - ('xml2', '1.3.2', { - 'checksums': ['df22f9e7e3189d8c9b8804eaf0105324fdac983cffe743552f6d76613600a4cf'], - }), - ('triebeard', '0.3.0', { - 'checksums': ['bf1dd6209cea1aab24e21a85375ca473ad11c2eff400d65c6202c0fb4ef91ec3'], - }), - ('urltools', '1.7.3', { - 'checksums': ['6020355c1b16a9e3956674e5dea9ac5c035c8eb3eb6bbdd841a2b5528cafa313'], - }), - ('httpcode', '0.3.0', { - 'checksums': ['593a030a4f94c3df8c15576837c17344701bac023ae108783d0f06c476062f76'], - }), - ('crul', '0.9.0', { - 'checksums': ['a7b42c69ca31648a419b93c618d32d0613f3ea053e45d584e84ef422ccf531c0'], - }), - ('bold', '0.9.0', { - 'checksums': ['45e844a83f4545a2f84887e36db83113da824a8673fa039f067a3bd7ee82ed5e'], - }), - ('rredlist', '0.6.0', { - 'checksums': ['bed33680f4e36f0f357d5785b631ae91232c8593a7517f1c0a4199d4e1e28332'], - }), - ('rentrez', '1.2.2', { - 'checksums': ['e5cb4265fd06d2ed0e11da3667ba79f7f2c8816005ba72cf5f53b8cf02dc193e'], - }), - ('rotl', '3.0.10', { - 'checksums': ['38b4679fe2d5407f7d0799d624ae8ea5d73ec0b6531b0e3d48246dea5575073a'], - }), - ('solrium', '1.1.4', { - 'checksums': ['5fccdb455746493c56e4df91f01ea9e89cdf0d67cfa5f958ca246b9207d20375'], - }), - ('ritis', '0.9.0', { - 'checksums': ['4abbe6c860fd3e465116573c9b2f119dbbd0046646844551523188ded63f0f6c'], - }), - 
('worrms', '0.4.0', { - 'checksums': ['8480c56a4412662a383103fef68e73fcf14e94fcb878c25df8c6d5a8c0146059'], - }), - ('natserv', '0.4.0', { - 'checksums': ['ba7ef96290b4713e46197cc872d5400710086dc3668717d67995ee3de3d19c87'], - }), - ('WikipediR', '1.5.0', { - 'checksums': ['f8d0e6f04fb65f7ad9c1c068852a6a8b699ffe8d39edf1f3fa07d32d087e8ff0'], - }), - ('WikidataR', '1.4.0', { - 'checksums': ['64b1d53d7023249b73a77a7146adc3a8957b7bf3d808ebd6734795e9f58f4b2a'], - }), - ('wikitaxa', '0.3.0', { - 'checksums': ['10dbabac6c56c1d0f33a66ff9b4f48b0bcb470711808a86863b48dc1140ec86c'], - }), - ('phangorn', '2.5.5', { - 'checksums': ['c58dc1ace26cb4358619a15da3ea4765dbdde1557acccc5103c85589a7571346'], - }), - ('taxize', '0.9.95', { - 'checksums': ['8a27d81678e60f67082d9b0b3cd104fe531ea2be2d9073a20cab016259228834'], - }), - ('uuid', '0.1-4', { - 'checksums': ['98e0249dda17434bfa209c2058e9911e576963d4599be9f7ea946e664f8ca93e'], - }), - ('RNeXML', '2.4.3', { - 'checksums': ['bf801c93da4d5a59c92d17c15c04072e1ba4f72c50461a1e1eda2d446109a925'], - }), - ('phylobase', '0.8.10', { - 'checksums': ['5a44380ff49bab333a56f6f96157324ade8afb4af0730e013194c4badb0bf94b'], - }), - ('magick', '2.3', { - 'checksums': ['a8412512a132a74ed88fbe64a0a415e9ba5437a1b8a664990638e10915274ba0'], - }), - ('animation', '2.6', { - 'checksums': ['90293638920ac436e7e4de76ebfd92e1643ccdb0259b62128f16dd0b13245b0a'], - }), - ('bigmemory.sri', '0.1.3', { - 'checksums': ['55403252d8bae9627476d1f553236ea5dc7aa6e54da6980526a6cdc66924e155'], - }), - ('bigmemory', '4.5.36', { - 'checksums': ['18c67fbe6344b2f8223456c4f19ceebcf6c1166255eab81311001fd67a45ef0e'], - }), - ('calibrate', '1.7.5', { - 'checksums': ['33f4f6874f0a979af3ce592ed1105e829d3df1fbf05c6e0cd3829a13b21d82e8'], - }), - ('clusterGeneration', '1.3.4', { - 'checksums': ['7c591ad95a8a9d7fb0e4d5d80dfd78f7d6a63cf7d11eb53dd3c98fdfb5b868aa'], - }), - ('dismo', '1.1-4', { - 'checksums': ['f2110f716cd9e4cca5fd2b22130c6954658aaf61361d2fe688ba22bbfdfa97c8'], - }), - ('extrafontdb', '1.0', { - 'checksums': ['faa1bafee5d4fbc24d03ed237f29f1179964ebac6e3a46ac25b0eceda020b684'], - }), - ('Rttf2pt1', '1.3.8', { - 'checksums': ['560646d4488bf70edd8f785a99e8208e7fd004014e29cb52b050fb55e7176e2c'], - }), - ('extrafont', '0.17', { - 'checksums': ['2f6d7d79a890424b56ddbdced361f8b9ddede5edd33e090b816b88a99315332d'], - }), - ('fields', '10.3', { - 'checksums': ['490bff3637edd6d42b578776648be031486fc38cdbe668fd46b07c2add3e698a'], - }), - ('shapefiles', '0.7', { - 'checksums': ['eeb18ea4165119519a978d4a2ba1ecbb47649deb96a7f617f5b3100d63b3f021'], - }), - ('fossil', '0.4.0', { - 'checksums': ['37c082fa15ebae89db99d6071b2bb2cad6a97a0405e9b4ef77f62a8f6ad274c1'], - }), - ('geiger', '2.0.6.4', { - 'checksums': ['8ddc12779b86b14b173a5c72a28c4e22784e4a7a48e6c806e48a097c2928af64'], - }), - ('shape', '1.4.4', { - 'checksums': ['f4cb1b7d7c84cf08d2fa97f712ea7eb53ed5fa16e5c7293b820bceabea984d41'], - }), - ('glmnet', '3.0-2', { - 'checksums': ['f48956a75af7e2be045198873fc9eb637a549af1db83dcf76cac3774bfb3762c'], - }), - ('crosstalk', '1.1.0.1', { - 'checksums': ['36a70b10bc11826e314c05f9579fd791b9ac3b3a2cfed4d4ca74ce1ad991300e'], - }), - ('miniUI', '0.1.1.1', { - 'checksums': ['452b41133289f630d8026507263744e385908ca025e9a7976925c1539816b0c0'], - }), - ('webshot', '0.5.2', { - 'checksums': ['f183dc970157075b51ac543550a7a48fa3428b9c6838abb72fe987c21982043f'], - }), - ('manipulateWidget', '0.10.1', { - 'checksums': ['9d621192121f6b516bc7f1a18305995bfb7838c6683ac701422afc03a50e27ee'], - }), - ('rgl', '0.100.54', { - 'checksums': 
['17b7f8f135f526aba17dc516952f692daa7a7d6e787157fdff8dd5175113fad5'], - }), - ('Rtsne', '0.15', { - 'checksums': ['56376e4f0a382fad3d3d40e2cb0562224be5265b827622bcd235e8fc63df276c'], - }), - ('labdsv', '2.0-1', { - 'checksums': ['5a4d55e9be18222dc47e725008b450996448ab117d83e7caaa191c0f13fd3925'], - }), - ('stabs', '0.6-3', { - 'checksums': ['e961ae21d45babc1162b6eeda874c4e3677fc286fd06f5427f071ad7a5064a9f'], - }), - ('modeltools', '0.2-23', { - 'checksums': ['6b3e8d5af1a039db5c178498dbf354ed1c5627a8cea9229726644053443210ef'], - }), - ('strucchange', '1.5-2', { - 'checksums': ['7d247c5ae6f5a63c80e478799d009c57fb8803943aa4286d05f71235cc1002f8'], - }), - ('TH.data', '1.0-10', { - 'checksums': ['618a1c67a30536d54b1e48ba3af46a6edcd6c2abef17935b5d4ba526a43aff55'], - }), - ('multcomp', '1.4-13', { - 'checksums': ['d30f0357b8307e7feb574d6d307e0ebc6bdca66b2cc172980fa5309685885fdb'], - }), - ('libcoin', '1.0-5', { - 'checksums': ['0a744164e00557d2f3e888d14cfd6108d17c14e983db620f74c7a5475be8a9b2'], - }), - ('matrixStats', '0.56.0', { - 'checksums': ['39e34a3dc480b9df05bb1a555eaef1dc1971a53f3ea6e01eb3a68bd1d3760f27'], - }), - ('coin', '1.3-1', { - 'checksums': ['5de2519a6e2b059bba9d74c58085cccaff1aaaa0454586ed164a108ebd1b2062'], - }), - ('party', '1.3-4', { - 'checksums': ['7689bd4fe7968ef1981147c5ad11237d630eddd5789a05c090339898eff71e7f'], - }), - ('inum', '1.0-1', { - 'checksums': ['3c2f94c13c03607e05817e4859595592068b55e810fed94e29bc181ad248a099'], - }), - ('partykit', '1.2-7', { - 'checksums': ['5c993c729c2975095eb27e6363eeb1c8a8ba22035f226f598af9d43a4ca312c1'], - }), - ('mboost', '2.9-2', { - 'checksums': ['34c6ba2051adc5ff429a594f7144bffcb7b129d5ff7c28a14cf21f38dbd554aa'], - }), - ('msm', '1.6.8', { - 'checksums': ['f3f18a9ea622a6d56f0f6d675b4890081d6def8b91a694c6764dac0d1cf262b4'], - }), - ('nor1mix', '1.3-0', { - 'checksums': ['9ce4ee92f889a4a4041b5ea1ff09396780785a9f12ac46f40647f74a37e327a0'], - }), - ('np', '0.60-10', { - 'checksums': ['a27b4bbca8b83a289c98920c1c8f5e9979ba9772086893252a4297dd2698081a'], - }), - ('polynom', '1.4-0', { - 'checksums': ['c5b788b26f7118a18d5d8e7ba93a0abf3efa6603fa48603c70ed63c038d3d4dd'], - }), - ('polspline', '1.1.17', { - 'checksums': ['d67b269d01105d4a6ea774737e921e66e065a859d1931ae38a70f88b6fb7ee30'], - }), - ('rms', '5.1-4', { - 'checksums': ['38f5844c4944a95b2adebea6bb1d163111270b8662399ea0349c45c0758076a6'], - }), - ('RWekajars', '3.9.3-2', { - 'checksums': ['16e6b019aab1646f89c5203f0d6fc1cb800129e5169b15aaef30fd6236f5da1a'], - }), - ('RWeka', '0.4-42', { - 'checksums': ['84e53028875d4603bd073c77709941d358152b8274977d45934fa89121b02104'], - }), - ('slam', '0.1-47', { - 'checksums': ['696356a68aa92059fa794444faa4c1775193c723a262a5f75de3c3c3047fcf89'], - }), - ('tm', '0.7-7', { - 'checksums': ['d0dbe41ff8414bdc2eee06a1b0d6db4567850135c4c6ff0a9c9ca8239166d15f'], - }), - ('TraMineR', '2.2-0', { - 'checksums': ['eeaeaf5151ec7a6b7179fd04dbdfb16479b4893e1547ccc29be74e444691d0f6'], - }), - ('chemometrics', '1.4.2', { - 'checksums': ['b705832fa167dc24b52b642f571ed1efd24c5f53ba60d02c7797986481b6186a'], - }), - ('FNN', '1.1.3', { - 'checksums': ['de763a25c9cfbd19d144586b9ed158135ec49cf7b812938954be54eb2dc59432'], - }), - ('ipred', '0.9-9', { - 'checksums': ['0da87a70730d5a60b97e46b2421088765e7d6a7cc2695757eba0f9d31d86416f'], - }), - ('miscTools', '0.6-26', { - 'checksums': ['be3c5a63ca12ce7ce4d43767a1815cd3dcf32664728ade251cfb03ea6f77fc9a'], - }), - ('maxLik', '1.3-8', { - 'checksums': ['33404d10bfe7746cab8227b880b50808a63909036daf6fedbac94c75ac68dfe5'], - }), - 
('gbRd', '0.4-11', { - 'checksums': ['0251f6dd6ca987a74acc4765838b858f1edb08b71dbad9e563669b58783ea91b'], - }), - ('Rdpack', '0.11-1', { - 'checksums': ['58020f150be07209fd1fdd7f5e58c138863e850f4e4c1512d69250286e091e20'], - }), - ('mlogit', '1.0-3.1', { - 'checksums': ['e4b601d8f0d0bcd1c63468ab88aa305355d2811c60b038a5ba4b99245cf59b0c'], - }), - ('getopt', '1.20.3', { - 'checksums': ['531f5fdfdcd6b96a73df2b39928418de342160ac1b0043861e9ea844f9fbf57f'], - }), - ('gsalib', '2.1', { - 'checksums': ['e1b23b986c18b89a94c58d9db45e552d1bce484300461803740dacdf7c937fcc'], - }), - ('optparse', '1.6.6', { - 'checksums': ['51779d497146e9354b1153713d939e81551e08948c2b00e4b117b1377c0b60d0'], - }), - ('labelled', '2.3.0', { - 'checksums': ['9f16f168436039d7881d535a9f15fb0dce752fd3a28bce89192718cdbd043a50'], - }), - ('questionr', '0.7.0', { - 'checksums': ['c4566880a1ca8f01faad396e20d907d913f4a252acaf83a0cb508a3738874cb3'], - }), - ('klaR', '0.6-15', { - 'checksums': ['5bfe5bc643f8a64b222317732c26e9f93be297cdc318a869f15cc9ab0d9e0fae'], - }), - ('neuRosim', '0.2-12', { - 'checksums': ['f4f718c7bea2f4b61a914023015f4c71312f8a180124dcbc2327b71b7be256c3'], - }), - ('locfit', '1.5-9.4', { - 'checksums': ['d9d3665c5f3d49f698fb4675daf40a0550601e86db3dc00f296413ceb1099ced'], - }), - ('GGally', '1.5.0', { - 'checksums': ['069261cd469e2d2c8c794b2956e69c356b471eccfc45a60c55e55dfd83185a20'], - }), - ('beanplot', '1.2', { - 'checksums': ['49da299139a47171c5b4ccdea79ffbbc152894e05d552e676f135147c0c9b372'], - }), - ('clValid', '0.6-6', { - 'checksums': ['c13ef1b6258e34ba53615b78f39dbe4d8ba47b976b3c24a3eedaecf5ffba19ed'], - }), - ('DiscriMiner', '0.1-29', { - 'checksums': ['5aab7671086ef9940e030324651976456f0e84dab35edb7048693ade885228c6'], - }), - ('ellipse', '0.4.1', { - 'checksums': ['1a9a9c52195b26c2b4d51ad159ab98aff7aa8ca25fdc6b2198818d1a0adb023d'], - }), - ('leaps', '3.1', { - 'checksums': ['3d7c3a102ce68433ecf167ece96a7ebb4207729e4defd0ac8fc00e7003f5c3b6'], - }), - ('pbkrtest', '0.4-8.6', { - 'checksums': ['5f863b167968d97ea504f3fffabc1b4c922e244d4e194e013229960d3384bd68'], - }), - ('carData', '3.0-3', { - 'checksums': ['986b84bdd289159eead8b050ea82600a4f77bf0bbe0293a7c7b25d607ff7e231'], - }), - ('maptools', '0.9-9', { - 'checksums': ['69ba3b2cd50260f78fb6c25cf0557b4a0d31498d6a4f4ff00e466334fba4946c'], - }), - ('zip', '2.0.4', { - 'checksums': ['ab5dd0c63bd30b478d0f878735e7baf36e2e76e4d12d2b4b8eddd03b665502b0'], - }), - ('openxlsx', '4.1.4', { - 'checksums': ['07a38b21f6ce6e92d58d7a51ea9f4b5fd77db49b019a18ba9ecea69878a39dd7'], - }), - ('rematch', '1.0.1', { - 'checksums': ['a409dec978cd02914cdddfedc974d9b45bd2975a124d8870d52cfd7d37d47578'], - }), - ('cellranger', '1.1.0', { - 'checksums': ['5d38f288c752bbb9cea6ff830b8388bdd65a8571fd82d8d96064586bd588cf99'], - }), - ('readxl', '1.3.1', { - 'checksums': ['24b441713e2f46a3e7c6813230ad6ea4d4ddf7e0816ad76614f33094fbaaaa96'], - }), - ('rio', '0.5.16', { - 'checksums': ['d3eb8d5a11e0a3d26169bb9d08f834a51a6516a349854250629072d59c29d465'], - }), - ('car', '3.0-7', { - 'checksums': ['ad98a2f0f47105285d6677b398fc1b169cc20458e799e05dae47c84068984e87'], - }), - ('flashClust', '1.01-2', { - 'checksums': ['48a7849bb86530465ff3fbfac1c273f0df4b846e67d5eee87187d250c8bf9450'], - }), - ('ggrepel', '0.8.2', { - 'checksums': ['0d01bfc005e9af5e6b57e2a677781424387f38ec208818295eb87dd5867551e1'], - }), - ('FactoMineR', '2.3', { - 'checksums': ['c64f30a3839a375395a3b7d8a4131e1df74aea31da6348d7a506eaa9da70af51'], - }), - ('flexclust', '1.4-0', { - 'checksums': 
['82fe445075a795c724644864c7ee803c5dd332a89ea9e6ccf7cd1ae2d1ecfc74'], - }), - ('flexmix', '2.3-15', { - 'checksums': ['ba444c0bfe33ab87d440ab590c06b03605710acd75811c1622253171bb123f43'], - }), - ('prabclus', '2.3-2', { - 'checksums': ['f421bcbcb557281e0de4a06b15f9a496adb5c640e883c0f7bb12051efc69e441'], - }), - ('diptest', '0.75-7', { - 'checksums': ['462900100ca598ef21dbe566bf1ab2ce7c49cdeab6b7a600a50489b05f61b61b'], - }), - ('trimcluster', '0.1-5', { - 'checksums': ['9239f20e4a06ac2fa89e5d5d89b23a45c8c534a7264d89bede8a35d43dda518b'], - }), - ('fpc', '2.2-5', { - 'checksums': ['45855d446593b93ea0873d701a6c7c6b47335a67ab34066e4cc8ae1d3f24a080'], - }), - ('BiasedUrn', '1.07', { - 'checksums': ['2377c2e59d68e758a566452d7e07e88663ae61a182b9ee455d8b4269dda3228e'], - }), - ('TeachingDemos', '2.12', { - 'checksums': ['3e75405ce1affa406d6df85e06f96381412bc7a2810b25d8c81bfe64c4698644'], - }), - ('kohonen', '3.0.10', { - 'checksums': ['996956ea46a827c9f214e4f940a19304a0ff35bda707d4d7312f80d3479067b2'], - }), - ('base64', '2.0', { - 'checksums': ['8e259c2b12446197d1152b83a81bab84ccb5a5b77021a9b5645dd4c63c804bd1'], - }), - ('doRNG', '1.8.2', { - 'checksums': ['33e9d45b91b0fde2e35e911b9758d0c376049121a98a1e4c73a1edfcff11cec9'], - }), - ('nleqslv', '3.3.2', { - 'checksums': ['f54956cf67f9970bb3c6803684c84a27ac78165055745e444efc45cfecb63fed'], - }), - ('Deriv', '4.0', { - 'checksums': ['76788764177b24dc27f4e27046fa563ad97014e0d53e14a880ebff2f9177b40e'], - }), - ('RGCCA', '2.1.2', { - 'checksums': ['20f341fca8f616c556699790814debdf2ac7aa4dd9ace2071100c66af1549d7d'], - }), - ('pheatmap', '1.0.12', { - 'checksums': ['579d96ee0417203b85417780eca921969cda3acc210c859bf9dfeff11539b0c1'], - }), - ('pvclust', '2.2-0', { - 'checksums': ['7892853bacd413b5a921006429641ad308a344ca171b3081c15e4c522a8b0201'], - }), - ('RCircos', '1.2.1', { - 'checksums': ['3b9489ab05ea83ead99ca6e4a1e6830467a2064779834aff1317b42bd41bb8fd'], - }), - ('lambda.r', '1.2.4', { - 'checksums': ['d252fee39065326c6d9f45ad798076522cec05e73b8905c1b30f95a61f7801d6'], - }), - ('futile.options', '1.0.1', { - 'checksums': ['7a9cc974e09598077b242a1069f7fbf4fa7f85ffe25067f6c4c32314ef532570'], - }), - ('futile.logger', '1.4.3', { - 'checksums': ['5e8b32d65f77a86d17d90fd8690fc085aa0612df8018e4d6d6c1a60fa65776e4'], - }), - ('VennDiagram', '1.6.20', { - 'checksums': ['e51cb3fff23c6ec8191966490bf875a7415f8725d4054bae881a25febb9281c5'], - }), - ('xlsxjars', '0.6.1', { - 'checksums': ['37c1517f95f8bca6e3514429394d2457b9e62383305eba288416fb53ab2e6ae6'], - }), - ('xlsx', '0.6.3', { - 'checksums': ['e5a9b8ead1b4502e7a1143a1d842d4994dd92f333a95a00d81a27ef62c5e035e'], - }), - ('uroot', '2.1-0', { - 'checksums': ['3c02a9dadd22aa67a59e99007ab6f576dc428859fa746d3a8f3ffa2bb43d18c2'], - }), - ('forecast', '8.12', { - 'checksums': ['eb607fd584d66abf39b14c00d50111304e892a6e0778c9a8354195c6c92f92f9'], - }), - ('fma', '2.4', { - 'checksums': ['69a94c3bd464176a80232d49fcd04d478d4dd59f9bf128d6a9f46e49612d27f4'], - }), - ('expsmooth', '2.3', { - 'checksums': ['ac7da36347f983d6ec71715daefd2797fe2fc505c019f4965cff9f77ce79982a'], - }), - ('fpp', '0.5', { - 'checksums': ['9c87dd8591b8a87327cae7a03fd362a5492495a96609e5845ccbeefb96e916cb'], - }), - ('tensor', '1.5', { - 'checksums': ['e1dec23e3913a82e2c79e76313911db9050fb82711a0da227f94fc6df2d3aea6'], - }), - ('polyclip', '1.10-0', { - 'checksums': ['74dabc0dfe5a527114f0bb8f3d22f5d1ae694e6ea9345912909bae885525d34b'], - }), - ('goftest', '1.2-2', { - 'checksums': ['e497992666b002b6c6bed73bf05047ad7aa69eb58898da0ad8f1f5b2219e7647'], - 
}), - ('spatstat.utils', '1.17-0', { - 'checksums': ['39cd683ed7f41d8adc9e28af073d91b244aa1cf5ad966dfbb396ee3ee79f0922'], - }), - ('spatstat.data', '1.4-3', { - 'checksums': ['8955b6ac40cc7d0d89e02334bb46f4c223ff0755e5818f132fee753e77918ea2'], - }), - ('spatstat', '1.63-3', { - 'checksums': ['07b4a1a1b37c91944f31779dd789598f4a5ad047a3de3e9ec2ca99b9e9565528'], - }), - ('pracma', '2.2.9', { - 'checksums': ['0cea0ff5e88643df121e07b9aebfe57084c61e11801680039752f371fe87bf1e'], - }), - ('RCurl', '1.98-1.2', { - 'checksums': ['5d74a0cdc3c5684b0348b959f67039e3c2a5da2bbb6176f6800a94124895a7a8'], - }), - ('bio3d', '2.4-1', { - 'checksums': ['679fbd87fe9fb82a65427d281d3b68906509e411270cd87d2deb95d404333c1f'], - }), - ('AUC', '0.3.0', { - 'checksums': ['e705f2c63d336249d19187f3401120d738d42d323fce905f3e157c2c56643766'], - }), - ('interpretR', '0.2.4', { - 'checksums': ['4c08a6dffd6fd5764f27812f3a085c53e6a21d59ae82d903c9c0da93fd1dd059'], - }), - ('cvAUC', '1.1.0', { - 'checksums': ['c4d8ed53b93869650aa2f666cf6d1076980cbfea7fa41f0b8227595be849738d'], - }), - ('SuperLearner', '2.0-26', { - 'checksums': ['4462922c8daae2773f79ecdea7ca3cc4ea51bfd101c5e6c1ad22f9190e746081'], - }), - ('mediation', '4.5.0', { - 'checksums': ['210206618787c395a67689be268283df044deec7199d9860ed95218ef1e60845'], - }), - ('ModelMetrics', '1.2.2.2', { - 'checksums': ['5e06f1926aebca5654e1329c66ef19b04058376b2277ebb16e3bf8c208d73457'], - }), - ('CVST', '0.2-2', { - 'checksums': ['854b8c983427ecf9f2f7798c4fd1c1d06762b5b0bcb1045502baadece6f78316'], - }), - ('DRR', '0.0.4', { - 'checksums': ['93e365a4907e301ae01f7d943e6bdcda71ef23c51a4759ba3c94bcf842d4e0f8'], - }), - ('dimRed', '0.2.3', { - 'checksums': ['e6e56e3f6999ebdc326e64ead5269f3aaf61dd587beefafb7536ac3890370d84'], - }), - ('lubridate', '1.7.8', { - 'checksums': ['3da19922fc373e113ecc58c4984955ba26da703edc9c991bd444b7077d4b553c'], - }), - ('ddalpha', '1.3.11', { - 'checksums': ['c30b4a3a9549cb4dc0a8e51e06f5b6e4c457c5326acc8f4680968c920f59b6e9'], - }), - ('gower', '0.2.1', { - 'checksums': ['af3fbe91cf818c0841b2c0ec4ddf282c182a588031228c8d88f7291b2cdff100'], - }), - ('RcppRoll', '0.3.0', { - 'checksums': ['cbff2096443a8a38a6f1dabf8c90b9e14a43d2196b412b5bfe5390393f743f6b'], - }), - ('recipes', '0.1.10', { - 'checksums': ['4f345e31568e41b3efb6c6333e8ccab032e293dbd0256299d922fe6c9532c985'], - }), - ('caret', '6.0-86', { - 'checksums': ['da4a1c7c3fbf645c5b02871e563a77404622b83623f0d1c5dc1425de7aa4ce37'], - }), - ('adabag', '4.2', { - 'checksums': ['47019eb8cefc8372996fbb2642f64d4a91d7cedc192690a8d8be6e7e03cd3c81'], - }), - ('parallelMap', '1.5.0', { - 'checksums': ['4afa727f4786279718cc799e45e91859a46f5cbc1ee652b0f47ae3b9f9d45e4e'], - }), - ('ParamHelpers', '1.14', { - 'checksums': ['b17652d0a69de3241a69f20be4ad1bfe02c413328a17f3c1ac7b73886a6ba2eb'], - }), - ('ggvis', '0.4.5', { - 'checksums': ['82373c3565c299279f6849f798cc39127b2b3f7ff2deee1946528474824b3124'], - }), - ('mlr', '2.17.1', { - 'checksums': ['0b71b9d00c627647cf5fc1f456d4445f025c90be2f974e05ccdb84e25ba1923b'], - }), - ('unbalanced', '2.0', { - 'checksums': ['9be32b1ce9d972f1abfff2fbe18f5bb5ba9c3f4fb1282063dc410b82ad4d1ea2'], - }), - ('RSNNS', '0.4-12', { - 'checksums': ['b18dfeda71573bc92c6888af72da407651bff7571967965fd3008f0d331743b9'], - }), - ('abc.data', '1.0', { - 'checksums': ['b242f43c3d05de2e8962d25181c6b1bb6ca1852d4838868ae6241ca890b161af'], - }), - ('abc', '2.1', { - 'checksums': ['0bd2dcd4ee1915448d325fb5e66bee68e0497cbd91ef67a11b400b2fbe52ff59'], - }), - ('lhs', '1.0.2', { - 'checksums': 
['e2945192740fb088b210786006b311d3d4e7da967733a1998380d597320c1158'], - }), - ('tensorA', '0.36.1', { - 'checksums': ['c7ffe12b99867675b5e9c9f31798f9521f14305c9d9f9485b171bcbd8697d09c'], - }), - ('EasyABC', '1.5', { - 'checksums': ['1dd7b1383a7c891cafb34d9cec65d92f1511a336cff1b219e63c0aa791371b9f'], - }), - ('whisker', '0.4', { - 'checksums': ['7a86595be4f1029ec5d7152472d11b16175737e2777134e296ae97341bf8fba8'], - }), - ('commonmark', '1.7', { - 'checksums': ['d14a767a3ea9778d6165f44f980dd257423ca6043926e3cd8f664f7171f89108'], - }), - ('roxygen2', '7.1.0', { - 'checksums': ['7e9b36f6e7c01a5c8c4747340b3d0c064ce2e48c93fcfbfe45139854fae74103'], - }), - ('git2r', '0.26.1', { - 'checksums': ['13d609286a0af4ef75ba76f2c2f856593603b8014e311b88896243a50b417435'], - }), - ('rversions', '2.0.1', { - 'checksums': ['51ec1f64e7d628e88d716a020d5d521eba71d472e3c9ae7b694428ef6dd786c5'], - }), - ('xopen', '1.0.0', { - 'checksums': ['e207603844d69c226142be95281ba2f4a056b9d8cbfae7791ba60535637b3bef'], - }), - ('sessioninfo', '1.1.1', { - 'checksums': ['166b04678448a7decd50f24afabe5e2ad613e3c55b180ef6e8dd7a870a1dae48'], - }), - ('rcmdcheck', '1.3.3', { - 'checksums': ['1ab679eb1976d74cd3be5bcad0af7fcc673dbdfd4406bbce32591c8fddfb93b4'], - }), - ('remotes', '2.1.1', { - 'checksums': ['4e590746fce618094089372b185e1ea234b3337b23c44c44118e942d0fb5118b'], - }), - ('fs', '1.4.1', { - 'checksums': ['ae9103dff26ca56a34901408bd650a2949f491b2a0886c686a51a179d38b7a4e'], - }), - ('clisymbols', '1.2.0', { - 'checksums': ['0649f2ce39541820daee3ed408d765eddf83db5db639b493561f4e5fbf88efe0'], - }), - ('ini', '0.3.1', { - 'checksums': ['7b191a54019c8c52d6c2211c14878c95564154ec4865f57007953742868cd813'], - }), - ('gh', '1.1.0', { - 'checksums': ['de9faf383c3fe5e87a75391d82cf71b1331b3c80cd00c4203146a303825d89ad'], - }), - ('rematch2', '2.1.1', { - 'checksums': ['d0423a418e8b46ac3a4819af7a7d19c39ca7c8c862c1e9a1c1294aa19152518f'], - }), - ('usethis', '1.6.1', { - 'checksums': ['60339059a97ed07dea7f8908b828b5bb42e0fd0b471165c061bc9660b0d59d6f'], - }), - ('DT', '0.13', { - 'checksums': ['79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5'], - }), - ('rex', '1.2.0', { - 'checksums': ['06b491f1469078862e40543fd74e1d38b2e0fb61fdf01c8083add4b11ac2eb54'], - }), - ('covr', '3.5.0', { - 'checksums': ['cb919912018130164a40803ac573a37dde2186678c058c03c6303d79604979df'], - }), - ('devtools', '2.3.0', { - 'checksums': ['4fc375c171335c67bd71df4e0b1b3dff2ae3aa17b3e0566b790ba0808b39dcd0'], - }), - ('Rook', '1.1-1', { - 'checksums': ['00f4ecfa4c5c57018acbb749080c07154549a6ecaa8d4130dd9de79427504903'], - }), - ('Cairo', '1.5-12', { - 'checksums': ['4e08eafb8c44045d16674ee5ae659f182ffe13ca86076fb077832947aa4a620b'], - }), - ('RMTstat', '0.3', { - 'checksums': ['81eb4c5434d04cb66c749a434c33ceb1c07d92ba79765d4e9233c13a092ec2da'], - }), - ('Lmoments', '1.3-1', { - 'checksums': ['7c9d489a08f93fa5877e2f233ab9732e0d1b2761596b3f6ac91f2295e41a865d'], - }), - ('distillery', '1.0-7', { - 'checksums': ['898833ceceed5291b4a02bf62c6fa5b78dd7837f9cc5a42b87a08672c7dae270'], - }), - ('extRemes', '2.0-11', { - 'checksums': ['75fbdeef677c81cf5661b8df3df4090c55f53e9bb96bb138b498eb0fbbf5af42'], - }), - ('tkrplot', '0.0-24', { - 'checksums': ['2873630a37d7ae1e09a5803d9a89ca0494edd83526c7b1860d9246543722f311'], - }), - ('misc3d', '0.8-4', { - 'checksums': ['75de3d2237f67f9e58a36e80a6bbf7e796d43eb46789f2dd1311270007bf5f62'], - }), - ('multicool', '0.1-11', { - 'checksums': ['1c907e64af2ac39facdf431a5691e69649f64af1f50e198ae39da5bf30026476'], - }), - ('plot3D', 
'1.3', { - 'checksums': ['b9e4ec2789e34ad249318900e186868650e1a33466b385cb492a45466db3dfc9'], - }), - ('plot3Drgl', '1.0.1', { - 'checksums': ['466d428d25c066c9c96d892f24da930513d42b1bdf76d3b53628c3ba13c3e48a'], - }), - ('OceanView', '1.0.5', { - 'checksums': ['c16e1bed97f4ede46dc017fdd6bd7575d925b57bd2601317bd3ad2357609f885'], - }), - ('ks', '1.11.7', { - 'checksums': ['6a6d9c2366e85a4c6af39b798f3798d20a42615ddfcebcedf6cf56087cdfd2b8'], - }), - ('logcondens', '2.1.5', { - 'checksums': ['72e61abc1f3eb28830266fbe5b0da0999eb5520586000a3024e7c26be93c02eb'], - }), - ('Iso', '0.0-18', { - 'checksums': ['2d7e8c4452653364ee086d95cea620c50378e30acfcff129b7261e1756a99504'], - }), - ('penalized', '0.9-51', { - 'checksums': ['eaa80dca99981fb9eb576261f30046cfe492d014cc2bf286c447b03a92e299fd'], - }), - ('clusterRepro', '0.9', { - 'checksums': ['940d84529ff429b315cf4ad25700f93e1156ccacee7b6c38e4bdfbe2d4c6f868'], - }), - ('randomForestSRC', '2.9.3', { - 'checksums': ['bc47bef9e5afade8fdf56e08ae0ad320e424dfa5b11a32cd2d166c9988dc2e16'], - }), - ('sm', '2.2-5.6', { - 'checksums': ['b890cd7ebe8ed711ab4a3792c204c4ecbe9e6ca1fd5bbc3925eba5833a839c30'], - }), - ('pbivnorm', '0.6.0', { - 'checksums': ['07c37d507cb8f8d2d9ae51a9a6d44dfbebd8a53e93c242c4378eaddfb1cc5f16'], - }), - ('lavaan', '0.6-5', { - 'checksums': ['feeb6e1b419aa1d54fd5af1d67260b5d13ff251c19de8136a4df565305d47b12'], - }), - ('matrixcalc', '1.0-3', { - 'checksums': ['17e6caeeecd596b850a6caaa257984398de9ec5d2b41ce83c428f112614b9cb0'], - }), - ('arm', '1.11-1', { - 'checksums': ['7b82dbe8c5141546d11b0af656a6addda4c07f06fc165d01c7c1e39540b55444'], - }), - ('mi', '1.0', { - 'checksums': ['34f44353101e8c3cb6bf59c5f4ff5b2391d884dcbb9d23066a11ee756b9987c0'], - }), - ('visNetwork', '2.0.9', { - 'checksums': ['5e0b3dc3a91e66e0a359433f03cc856d04b981b0f9ad228d8fa9c96b7fcaa420'], - }), - ('servr', '0.16', { - 'checksums': ['cc950bedbd52f2d93c54157dc5b261113be6baee2d9e90e99a8de048c09fda80'], - }), - ('rgexf', '0.16.0', { - 'checksums': ['2a671df9ac70cfefd4092754317cb28e32a33df345b80e1975bf838e838245ee'], - }), - ('influenceR', '0.1.0', { - 'checksums': ['4fc9324179bd8896875fc0e879a8a96b9ef2a6cf42a296c3b7b4d9098519e98a'], - }), - ('downloader', '0.4', { - 'checksums': ['1890e75b028775154023f2135cafb3e3eed0fe908138ab4f7eff1fc1b47dafab'], - }), - ('DiagrammeR', '1.0.5', { - 'checksums': ['0877af707925b03c58a7e00cd84eb4e9906b551a61d86130ef4165477654e334'], - }), - ('sem', '3.1-9', { - 'checksums': ['4a33780202506543da85877cd2813250114420d6ec5e75457bc67477cd332cb9'], - }), - ('network', '1.16.0', { - 'checksums': ['a24f51457439c7186ffa1fe53719742c501929ac1a354e458754a83f280fce36'], - }), - ('statnet.common', '4.3.0', { - 'checksums': ['834a3359eac967df0420eee416ae4983e3b502a3de56bb24f494a7ca4104e959'], - }), - ('sna', '2.5', { - 'checksums': ['13b508cacb0bf1e79b55d5c8f7e9ada3b173468d4d6d5f1dc606990ac03071c8'], - }), - ('glasso', '1.11', { - 'checksums': ['4c37844b26f55985184a734e16b8fe880b192e3d2763614b0ab3f99b4530e30a'], - }), - ('huge', '1.3.4.1', { - 'checksums': ['78ef9eae464d52c5247998b9514a81b178419b857b1a6c00d885e3ae6c03a886'], - }), - ('d3Network', '0.5.2.1', { - 'checksums': ['5c798dc0c87c6d574abb7c1f1903346e6b0fec8adfd1df7aef5e4f9e7e3a09be'], - }), - ('BDgraph', '2.62', { - 'checksums': ['7e5de4406f4a7873bf948852291d2851a2ab312288467687dd5c0392b2723bac'], - }), - ('pbapply', '1.4-2', { - 'checksums': ['ac19f209f36f4fa3d0f5b14b6cc5b0c279996fb9d3e86c848c0f6d03c025b3f6'], - }), - ('graphlayouts', '0.7.0', { - 'checksums': 
['20464b60376d9f8d522eec6a7495054b1715e4919f10e9a049868d8866398c9e'], - }), - ('tweenr', '1.0.1', { - 'checksums': ['efd68162cd6d5a4f6d833dbf785a2bbce1cb7b9f90ba3fb060931a4bd705096b'], - }), - ('ggforce', '0.3.1', { - 'checksums': ['a05271da9b226c12ae5fe6bc6eddb9ad7bfe19e1737e2bfcd6d7a89631332211'], - }), - ('tidygraph', '1.1.2', { - 'checksums': ['5642001d4cccb122d66481b7c61a06c724c02007cbd356ee61cb29726a56fafe'], - }), - ('ggraph', '2.0.2', { - 'checksums': ['80caab7a38f2548a9fcd1ff3655a6bdbcb776fe662e3d93c17798bf2a04078b2'], - }), - ('qgraph', '1.6.5', { - 'checksums': ['2295ccca41f84cba34ad0e6c1b31af8bde79bda7373754c255e0ee9e63d29e5f'], - }), - ('HWxtest', '1.1.9', { - 'checksums': ['a37309bed4a99212ca104561239d834088217e6c5e5e136ff022544c706f25e6'], - }), - ('diveRsity', '1.9.90', { - 'checksums': ['b8f49cdbfbd82805206ad293fcb2dad65b962fb5523059a3e3aecaedf5c0ee86'], - }), - ('doSNOW', '1.0.18', { - 'checksums': ['70e7bd82186e477e3d1610676d4c6a75258ac08f104ecf0dcc971550ca174766'], - }), - ('geepack', '1.3-1', { - 'checksums': ['823153ca28e1a8bd8a45de778279480c1c35e063d62c8955b6cea1602f28d6df'], - }), - ('biom', '0.3.12', { - 'checksums': ['4ad17f7811c7346dc4923bd6596a007c177eebb1944a9f46e5674afcc5fdd5a1'], - }), - ('pim', '2.0.2', { - 'checksums': ['1195dbdbd67348dfef4b6fc34fcec643da685ebe58d34bbe049ab121aca9944f'], - }), - ('minpack.lm', '1.2-1', { - 'checksums': ['14cb7dba3ef2b46da0479b46d46c76198e129a31f6157cd8b37f178adb15d5a3'], - }), - ('rootSolve', '1.8.2.1', { - 'checksums': ['488451182663197ae4513e46e24f72cadb2297d35a58a3007a0dbf1bf0833031'], - }), - ('diagram', '1.6.4', { - 'checksums': ['7c2bc5d5d634c3b8ca7fea79fb463e412962d88f47a77a74c811cc62f375ce38'], - }), - ('FME', '1.3.6.1', { - 'checksums': ['ae0c69f75614e2ef9f2096c205c7f8eb90485c6311213762c1416ece4036be18'], - }), - ('bmp', '0.3', { - 'checksums': ['bdf790249b932e80bc3a188a288fef079d218856cf64ffb88428d915423ea649'], - }), - ('tiff', '0.1-5', { - 'checksums': ['9514e6a9926fcddc29ce1dd12b1072ad8265900373f738de687ef4a1f9124e2b'], - }), - ('readbitmap', '0.1.5', { - 'checksums': ['737d7d585eb33de2c200da64d16781e3c9522400fe2af352e1460c6a402a0291'], - }), - ('imager', '0.42.1', { - 'checksums': ['cb9c0f8dbf1383951bf96f5aeded1e774c26135a0117279de7e728cb6822eab4'], - }), - ('signal', '0.7-6', { - 'checksums': ['6b60277b07cf0167f8272059b128cc82f27a9bab1fd33d74c2a9e1f2abca5def'], - }), - ('tuneR', '1.3.3', { - 'checksums': ['bdc3c2017b162d2ba0a249e80361a4f47202e763c21aecfc57380a482a3a692b'], - }), - ('pastecs', '1.3.21', { - 'checksums': ['8c1ef2affe88627f0b23295aa5edb758b8fd6089ef09f60f37c46445128b8d7c'], - }), - ('audio', '0.1-7', { - 'checksums': ['52e0397a45325aa9586ec68b94ab9e505bdefaf2a588d634fcb57a6a11659c74'], - }), - ('fftw', '1.0-6', { - 'checksums': ['397ef5ec354b919884f74fba4202bfc13ad11a70b16285c41677aad1d3b170ce'], - }), - ('seewave', '2.1.5', { - 'checksums': ['718b1fb1c289f92be50de099da36d20380d113cb1577569333fca6195f71e8e1'], - }), - ('gsw', '1.0-5', { - 'checksums': ['eb468918ee91e429b47fbcac43269eca627b7f64b61520de5bbe8fa223e96453'], - }), - ('oce', '1.2-0', { - 'checksums': ['99072f2b20ad471b5a2afeb4d0690cad57cc770d60769a5cb20d001511439aa2'], - }), - ('ineq', '0.2-13', { - 'checksums': ['e0876403f59a3dfc2ea7ffc0d965416e1ecfdecf154e5856e5f54800b3efda25'], - }), - ('soundecology', '1.3.3', { - 'checksums': ['276164d5eb92c78726c647be16232d2443acbf7061371ddde2672b4fdb7a069a'], - }), - ('memuse', '4.1-0', { - 'checksums': ['58d6d1ca5d6bd481f4ed299eff6a9d5660eb0f8db1abe54c49e144093cba72ad'], - }), - ('pinfsc50', 
'1.1.0', { - 'checksums': ['b6b9b6365a3f408533264d7ec820494f57eccaf362553e8478a46a8e5b474aba'], - }), - ('vcfR', '1.10.0', { - 'checksums': ['9e19c8b23c981b61320aa275821f9accae8738bca775175b1201fcc30479ae8d'], - }), - ('glmmML', '1.1.0', { - 'checksums': ['34f088a73ccf6092908502a5bdaaf8209e9134d38abbbd7c4dd559832e653188'], - }), - ('cowplot', '1.0.0', { - 'checksums': ['70f9a7c46d10f409d1599f1afc9fd3c947051cf2b430f01d903c64ef1e6c98a5'], - }), - ('tsne', '0.1-3', { - 'checksums': ['66fdf5d73e69594af529a9c4f261d972872b9b7bffd19f85c1adcd66afd80c69'], - }), - ('sn', '1.6-1', { - 'checksums': ['80071625131256147f94a1a35b6f0cabd6de8b225f16860e398b6a8ca688d96a'], - }), - ('tclust', '1.4-1', { - 'checksums': ['4b0be612c8ecd7b4eb19a44ab6ac8f5d40515600ae1144c55989b6b41335ad9e'], - }), - ('ranger', '0.12.1', { - 'checksums': ['fc308e0ac06718272799928e1a19612de16b05bde481d8f38e11a101df5425ef'], - }), - ('hexbin', '1.28.1', { - 'checksums': ['42d092c709ebc84b18df8121beb6bd1d8a3f6f357afd5c3490757c4c4795c6e7'], - }), - ('pryr', '0.1.4', { - 'checksums': ['d39834316504c49ecd4936cbbcaf3ee3dae6ded287af42475bf38c9e682f721b'], - }), - ('moments', '0.14', { - 'checksums': ['2a3b81e60dafdd092d2bdd3513d7038855ca7d113dc71df1229f7518382a3e39'], - }), - ('laeken', '0.5.1', { - 'checksums': ['1aa94a1768969eb999f7a41212af2d8b2943b43a68a92f99c9f77929e19439a5'], - }), - ('VIM', '5.1.1', { - 'checksums': ['ca1430103b6bd658e318bbbbd9c25763d11d0b3f52706b1a7ea7fafd408e4270'], - }), - ('proxy', '0.4-24', { - 'checksums': ['8cff9bf036475941a7c44ba9bb5e2f6d4777d49ab3daaeb52d23f4b2af6d9c7c'], - }), - ('smoother', '1.1', { - 'checksums': ['91b55b82f805cfa1deedacc0a4e844a2132aa59df593f3b05676954cf70a195b'], - }), - ('dynamicTreeCut', '1.63-1', { - 'checksums': ['831307f64eddd68dcf01bbe2963be99e5cde65a636a13ce9de229777285e4db9'], - }), - ('beeswarm', '0.2.3', { - 'checksums': ['0115425e210dced05da8e162c8455526a47314f72e441ad2a33dcab3f94ac843'], - }), - ('vipor', '0.4.5', { - 'checksums': ['7d19251ac37639d6a0fed2d30f1af4e578785677df5e53dcdb2a22771a604f84'], - }), - ('ggbeeswarm', '0.6.0', { - 'checksums': ['bbac8552f67ff1945180fbcda83f7f1c47908f27ba4e84921a39c45d6e123333'], - }), - ('shinydashboard', '0.7.1', { - 'checksums': ['51a49945c6b8a684111a2ba4b2a5964e3a50610286ce0378e37ae02316620a4e'], - }), - ('rrcov', '1.5-2', { - 'checksums': ['a7641b93ca8efd91b0957adecd76f96c53d3804ace7b1cbe84872f655199c254'], - }), - ('WriteXLS', '5.0.0', { - 'checksums': ['5aeb631c7f4dee300a19ded493110d7241e1b79744be05beca770a01ffc1d7bf'], - }), - ('bst', '0.3-17', { - 'checksums': ['1ed161d33a7304abfa2fb23daeda2f870ad8483b7fa9b91e6fc8ced21fd8f074'], - }), - ('mpath', '0.3-25', { - 'checksums': ['3332f74255520152cb2149bdff24ad650a036161a7629f686c8fee804c0336e8'], - }), - ('timereg', '1.9.4', { - 'checksums': ['fbf4eeee1648fceb98773156764c32b3a9481f0fb9f8dc3a9d0331a9051cb54b'], - }), - ('peperr', '1.1-7.1', { - 'checksums': ['5d4eff0f0b61c0b3e479c2ac2978c8e32373b9630565bf58fee48ead6166698a'], - }), - ('heatmap3', '1.1.7', { - 'checksums': ['bab39bdcc462ed9e15dda54d58385b7c8d2bca800cd0e6ee2fce12475661b2bd'], - }), - ('GlobalOptions', '0.1.1', { - 'checksums': ['4249ef78424128050af83bbb8e71b4af82f8490c87f6a9d927782b80be830975'], - }), - ('circlize', '0.4.8', { - 'checksums': ['22d6908b9d2e496105d9b70b73a74152398e5e9e38c60042ffe041df2b4c794b'], - }), - ('GetoptLong', '0.1.8', { - 'checksums': ['6c0edb7233b79fb7f4789a825e8e7d7eee50b5e85b7fd5b7d74b9440fd9e1dd1'], - }), - ('dendextend', '1.13.4', { - 'checksums': 
['c456b4f43075e8de0f29a6c997e1c0d4788487ab7b947a4b1bf05db2b4f94bde'], - }), - ('RInside', '0.2.16', { - 'checksums': ['7ae4ade128ea05f37068d59e610822ff0b277f9d39d8900f7eb31759ad5a2a0e'], - }), - ('limSolve', '1.5.6', { - 'checksums': ['b97ea9930383634c8112cdbc42f71c4e93fe0e7bfaa8f401921835cb44cb49a0'], - }), - ('dbplyr', '1.4.3', { - 'checksums': ['69ac7b4022c691e3822fc73fabb3bf073405d5a433c52f5f0f98cf90a1d228ea'], - }), - ('modelr', '0.1.6', { - 'checksums': ['d7e5f3ddf0b3e6520ca06229471f5bcd9e371e2fecd53c03202b474c2a1955f4'], - }), - ('debugme', '1.1.0', { - 'checksums': ['4dae0e2450d6689a6eab560e36f8a7c63853abbab64994028220b8fd4b793ab1'], - }), - ('reprex', '0.3.0', { - 'checksums': ['203c2ae6343f6ff887e7a5a3f5d20bae465f6e8d9745c982479f5385f4effb6c'], - }), - ('selectr', '0.4-2', { - 'checksums': ['5588aed05f3f5ee63c0d29953ef53da5dac7afccfdd04b7b22ef24e1e3b0c127'], - }), - ('rvest', '0.3.5', { - 'checksums': ['0e7f41be4ce6501d7af50575a2532d4bfd9153ca57900ee62dbc27c0a22c0a64'], - }), - ('tidyverse', '1.3.0', { - 'checksums': ['6d8acb81e994f9bef5e4dcf908bcea3786d108adcf982628235b6c8c80f6fe09'], - }), - ('R.cache', '0.14.0', { - 'checksums': ['18af4e372440b9f28b4b71346c8ed9de220232f9903730ccee2bfb3c612c16d9'], - }), - ('R.rsp', '0.43.2', { - 'checksums': ['f291a78ce9955943e0ebad1291f729dc4d9a8091f04b83fc4b1526bcb6c71f89'], - }), - ('listenv', '0.8.0', { - 'checksums': ['fd2aaf3ff2d8d546ce33d1cb38e68401613975117c1f9eb98a7b41facf5c485f'], - }), - ('globals', '0.12.5', { - 'checksums': ['1519a7668b4b549c081f60a5f6b71d8d1dc8833f618125f6c0e4caf8b48a48c1'], - }), - ('future', '1.17.0', { - 'checksums': ['2fa3b88439eaa33901669295186d04eb54f033257015683cf8a2e3c7f83b9e34'], - }), - ('gdistance', '1.3-1', { - 'checksums': ['0e9a7ab4fb75c2990ff7b85aa0661aaadbf4804f2a92fac9dd6d3c75db346813'], - }), - ('vioplot', '0.3.4', { - 'checksums': ['4914262f2e7913ffa5741e74b20157f4a904ba31e648fa5df9ff6a1aaba753bb'], - }), - ('emulator', '1.2-20', { - 'checksums': ['7cabf2cf74d879ad9dbaed8fdee54a5c94a8658a0645c021d160b2ef712ce287'], - }), - ('gmm', '1.6-4', { - 'checksums': ['03ad5ff37d174e9cef13fa41d866412c57b7cbd9155312831e16a1fcda70bc95'], - }), - ('tmvtnorm', '1.4-10', { - 'checksums': ['1a9f35e9b4899672e9c0b263affdc322ecb52ec198b2bb015af9d022faad73f0'], - }), - ('IDPmisc', '1.1.20', { - 'checksums': ['bcb9cd7b8097e5089d1936286ef310ac2030ea7791350df706382ba470afc67f'], - }), - ('gap', '1.2.2', { - 'checksums': ['9c66a52b371b282b20295676bdd86a11d59a6fb2acddb19170376e1a5c65b834'], - }), - ('qrnn', '2.0.5', { - 'checksums': ['3bd83ee8bd83941f9defdab1b5573d0ceca02bf06759a67665e5b9358ff92f52'], - }), - ('TMB', '1.7.16', { - 'checksums': ['84740a2eaecd2ece7049c82d661fe1688008fdece96d90399d31a5d8a0089e52'], - }), - ('glmmTMB', '1.0.1', { - 'checksums': ['b582ac41fb4390146f1446c6629fec40c6c9c125f99083602f091dc60f0ebd69'], - }), - ('gmp', '0.5-13.6', { - 'checksums': ['39a61618cc9eeabd00665cc5f24721e75f0dec8268059a0d18c907c2adf85a48'], - }), - ('ROI', '0.3-3', { - 'checksums': ['2977604b9def46a3638d56a7efa890f2e84fa320bece693d03c196771466a919'], - }), - ('Rglpk', '0.6-4', { - 'checksums': ['a28dbc3130b9618d6ed2ef718d2c55df8ed8c44a47161097c53fe15fa3bfbfa6'], - }), - ('ROI.plugin.glpk', '0.3-0', { - 'checksums': ['160ac14d20c217ff186912c06d53bccf2a33664977ae4c6fc5113a7ac8533ba8'], - }), - ('spaMM', '3.2.0', { - 'checksums': ['40d54ad52c4839f33baa1e488e1e76042e57083ff7780f9c5640c49340ff2999'], - }), - ('qgam', '1.3.2', { - 'checksums': ['273a40d0bfdc340c049bcb85aea83acd887868d8a69c0062b8399e0b24137a52'], - }), - 
('DHARMa', '0.3.0', { - 'checksums': ['1c7ac2f1897ca62e0ebb7367c4b31866515c8503d0fa645fa5e8ac5172310298'], - }), - ('mvnfast', '0.2.5', { - 'checksums': ['21b9fa72d1e3843513908aaacd6c4d876cc7a9339782d0151b24910df2975f88'], - }), - ('bridgesampling', '1.0-0', { - 'checksums': ['9e182e15ba4e0a0fefd6edc58f1939fd971dd5c53c444ca9c1820bb2c1de90ab'], - }), - ('BayesianTools', '0.1.7', { - 'checksums': ['af49389bdeb794da3c39e1d63f59e6219438ecb8613c5ef523b00c6fed5a600c'], - }), - ('gomms', '1.0', { - 'checksums': ['52828c6fe9b78d66bde5474e45ff153efdb153f2bd9f0e52a20a668e842f2dc5'], - }), - ('feather', '0.3.5', { - 'checksums': ['50ff06d5e24d38b5d5d62f84582861bd353b82363e37623f95529b520504adbf'], - }), - ('dummies', '1.5.6', { - 'checksums': ['7551bc2df0830b98c53582cac32145d5ce21f5a61d97e2bb69fd848e3323c805'], - }), - ('SimSeq', '1.4.0', { - 'checksums': ['5ab9d4fe2cb1b7634432ff125a9e04d2f574fed06246a93859f8004e10790f19'], - }), - ('uniqueAtomMat', '0.1-3-2', { - 'checksums': ['f7024e73274e1e76a870ce5e26bd58f76e8f6df0aa9775c631b861d83f4f53d7'], - }), - ('PoissonSeq', '1.1.2', { - 'checksums': ['6f3dc30ad22e33e4fcfa37b3427c093d591c02f1b89a014d85e63203f6031dc2'], - }), - ('aod', '1.3.1', { - 'checksums': ['052d8802500fcfdb3b37a8e3e6f3fbd5c3a54e48c3f68122402d2ea3a15403bc'], - }), - ('cghFLasso', '0.2-1', { - 'checksums': ['6e697959b35a3ceb2baa1542ef81f0335006a5a9c937f0173c6483979cb4302c'], - }), - ('svd', '0.5', { - 'checksums': ['d042d448671355d0664d37fd64dc90932eb780e6494c479d4431d1faae2071a1'], - }), - ('Rssa', '1.0.2', { - 'checksums': ['3991ad98e0170034b06ae8bb5b6337cbc418dc31ce465d02030cedf4ab69ff91'], - }), - ('JBTools', '0.7.2.9', { - 'checksums': ['b33cfa17339df7113176ad1832cbb0533acf5d25c36b95e888f561d586c5d62f'], - }), - ('RUnit', '0.4.32', { - 'checksums': ['23a393059989000734898685d0d5509ece219879713eb09083f7707f167f81f1'], - }), - ('DistributionUtils', '0.6-0', { - 'checksums': ['7443d6cd154760d55b6954142908eae30385672c4f3f838dd49876ec2f297823'], - }), - ('gapfill', '0.9.6', { - 'checksums': ['850d0be9d05e3f3620f0f5143496321f1004ed966299bffd6a67a9abd8d9040d'], - }), - ('gee', '4.13-20', { - 'checksums': ['53014cee059bd87dc22f9679dfbf18fe6813b9ab41dfe90361921159edfbf798'], - }), - ('Matching', '4.9-7', { - 'checksums': ['1956ecb5ebe1c88e2112cd277ae5c2ab4b8d8f60743e6e856a2c2e40aa05fc6d'], - }), - ('MatchIt', '3.0.2', { - 'checksums': ['782b159a2b5172e758e3993177930d604140ae668fd8a7c98c30792df80de9de'], - }), - ('RItools', '0.1-17', { - 'checksums': ['75654780e9ca39cb3c43acfaca74080ad74de50f92c5e36e95694aafdfdc0cea'], - }), - ('optmatch', '0.9-13', { - 'checksums': ['f8f327faa95c808773376570793bbabdbc185a6c7fcdce3b96a09c998134d0d8'], - }), - ('SPAtest', '3.0.2', { - 'checksums': ['7a5e02f636df4c299d3a2d36033f26492b6db51f04a5cd1c2ff17e7ec1a4e831'], - }), - ('SKAT', '2.0.0', { - 'checksums': ['b90be9552f65f0055311ec7a4de5b33520a040f9202aa5872fbfae306c496ce2'], - }), - ('GillespieSSA', '0.6.1', { - 'checksums': ['272e9b6b26001d166fd7ce8d04f32831ba23c676075fbd1e922e27ba2c962052'], - }), - ('startupmsg', '0.9.6', { - 'checksums': ['1d60ff13bb260630f797bde66a377a5d4cd65d78ae81a3936dc4374572ec786e'], - }), - ('distr', '2.8.0', { - 'checksums': ['bb7df05d6b946bcdbbec2e3397c7c7e349b537cabfcbb13a34bcf6312a71ceb7'], - }), - ('distrEx', '2.8.0', { - 'checksums': ['b064cde7d63ce93ec9969c8c4463c1e327758b6f8ea7765217d77f9ba9d590bf'], - }), - ('KODAMA', '1.5', { - 'checksums': ['8ecf53732c1be2bd1e111b3c6de65b66caf28360306e683fe945dc76d4c267dd'], - }), - ('locfdr', '1.1-8', { - 'checksums': 
['42d6e12593ae6d541e6813a140b92591dabeb1df94432a515507fc2eee9a54b9'], - }), - ('ica', '1.0-2', { - 'checksums': ['e721596fc6175d3270a60d5e0b5b98be103a8fd0dd93ef16680af21fe0b54179'], - }), - ('dtw', '1.21-3', { - 'checksums': ['1aa46b285b7a31ba19759e83562671ed9076140abec79fe0df0316af43871e0a'], - }), - ('SDMTools', '1.1-221.2', { - 'checksums': ['f0dd8c5f98d2f2c012536fa56d8f7a58aaf0c11cbe3527e66d4ee3194f6a6cf7'], - }), - ('ggridges', '0.5.2', { - 'checksums': ['b03a775df279a71f259470335decf033b0b9e34b7ee5726681b302ae4e11ff0e'], - }), - ('TFisher', '0.2.0', { - 'checksums': ['bd9b7484d6fba0165841596275b446f85ba446d40e92f3b9cb37381a3827e76f'], - }), - ('lsei', '1.2-0', { - 'checksums': ['4781ebd9ef93880260d5d5f23066580ac06061e95c1048fb25e4e838963380f6'], - }), - ('npsurv', '0.4-0', { - 'checksums': ['404cf7135dc40a04e9b81224a543307057a8278e11109ba1fcaa28e87c6204f3'], - }), - ('fitdistrplus', '1.0-14', { - 'checksums': ['85082590f62aa08d99048ea3414c5cc1e5b780d97b3779d2397c6cb435470083'], - }), - ('rappdirs', '0.3.1', { - 'checksums': ['2fd891ec16d28862f65bb57e4a78f77a597930abb59380e757afd8b6c6d3264a'], - }), - ('reticulate', '1.15', { - 'checksums': ['47db3e9c9424263ade15287da8e74f6ba261a936b644b197dba6772853b7b50d'], - }), - ('hdf5r', '1.3.2', { - 'installopts': '--configure-args="--with-hdf5=$EBROOTHDF5/bin/h5pcc"', - 'preinstallopts': "unset LIBS && ", - 'checksums': ['31493d9dde9705543e5474c937fa5b4b64895ae1dd6ee51d7039dd95a6015730'], - }), - ('DTRreg', '1.5', { - 'checksums': ['eb9b4d98b25eec304a447db302f618a75180f8d8fe0f5728ecd7e85957613456'], - }), - ('pulsar', '0.3.6', { - 'checksums': ['b5851bf365003ace07542fd21ccff015c4b21ffd73e21ec3a539563e9ef53564'], - }), - ('bayesm', '3.1-4', { - 'checksums': ['061b216c62bc72eab8d646ad4075f2f78823f9913344a781fa53ea7cf4a48f94'], - }), - ('energy', '1.7-7', { - 'checksums': ['67b88fb33ee6e7bec2e4fe356a4efd36f70c3cf9b0ebe2f6d9da9ec96de9968f'], - }), - ('compositions', '1.40-5', { - 'checksums': ['879e296037b0b3c52cfe48556820500b94d4eea16ec2b40f85988b65c5f72a51'], - }), - ('clustree', '0.4.2', { - 'checksums': ['5d6b8ee3cbbcdd235a7abe4107429e45847ed09ec1cdb572ad6efb9d88dff82e'], - }), - ('plotly', '4.9.2.1', { - 'checksums': ['f45eae325ab7e7924b0be098bad866ce003d657cf63e137104401c2dd4401db8'], - }), - ('tweedie', '2.3.2', { - 'checksums': ['9a6226e64e3d56eb7eb2a408f8b825c2ad6ee0ea203a9220e85e7789514adb81'], - }), - ('RcppGSL', '0.3.7', { - 'checksums': ['45e95c4170fc8421ae9b32134b3a402f76ea9657030969723a3563c7ce14dc32'], - }), - ('mvabund', '4.1.3', { - 'checksums': ['4b98049026fcc5a262163f6801d5b98b8543267cf7b0edac8382d5311b81a8fc'], - }), - ('fishMod', '0.29', { - 'checksums': ['5989e49ca6d6b2c5d514655e61f75b019528a8c975f0d6056143f17dc4277a5d'], - }), - ('gllvm', '1.2.1', { - 'checksums': ['a9dca68227a8f89c61950f6411de3b988e6e067d97fadc589f69ddd731c2e1ff'], - }), - ('grpreg', '3.2.2', { - 'checksums': ['e59f576ee5d794444917e0fbdab0d1ebf4aa71967c9a35ec196899ed5b168388'], - }), - ('trust', '0.1-8', { - 'checksums': ['952e348b62aec35988b103fd152329662cb6a451538f184549252fbf49d7dcac'], - }), - ('ergm', '3.10.4', { - 'checksums': ['885f0b1a23c5a2c1947962350cfab66683dfdfd1db173c115e90396d00831f22'], - }), - ('networkDynamic', '0.10.1', { - 'checksums': ['22eed8d9dea8d70877c1619eb2bc3f1ac5142ce3db6fd6eb3e0879ca56b76ca0'], - }), - ('tergm', '3.6.1', { - 'checksums': ['21de2eca943d89ba63af14951655d626f241bafccc4b2709fa39aa130625cd0f'], - }), - ('ergm.count', '3.4.0', { - 'checksums': ['7c24c79d0901c18991cce907306a1531cca676ae277c6b0a0e4962ad27c36baf'], - 
}), - ('tsna', '0.3.1', { - 'checksums': ['bba4b5e04ba647784581a2137f653f60b4c83cfd726c399556054c5a6d2cbd95'], - }), - ('statnet', '2019.6', { - 'checksums': ['0903e1a81ed1b6289359cefd12da1424c92456d19e062c3f74197b69e536b29d'], - }), - ('aggregation', '1.0.1', { - 'checksums': ['86f88a02479ddc8506bafb154117ebc3b1a4a44fa308e0193c8c315109302f49'], - }), - ('ComICS', '1.0.4', { - 'checksums': ['0af7901215876f95f309d7da6e633c38e4d7faf04112dd6fd343bc15fc593a2f'], - }), - ('dtangle', '2.0.9', { - 'checksums': ['c375068c1877c2e8cdc5601cfd5a9c821645c3dff90ddef64817f788f372e179'], - }), - ('mcmc', '0.9-7', { - 'checksums': ['b7c4d3d5f9364c67a4a3cd49296a61c315ad9bd49324a22deccbacb314aa8260'], - }), - ('MCMCpack', '1.4-6', { - 'checksums': ['6bcd018d6fa589a6854ee1bcea18b9d6c4095f3deae9058f69afbb09cba873c7'], - }), - ('shinythemes', '1.1.2', { - 'checksums': ['2e13d4d5317fc61082e8f3128b15e0b10ed9736ce81e152dd7ae7f6109f9b18a'], - }), - ('csSAM', '1.2.4', { - 'checksums': ['3d6442ad8c41fa84633cbbc275cd67e88490a160927a5c55d29da55a36e148d7'], - }), - ('bridgedist', '0.1.0', { - 'checksums': ['dc7c1c8874d6cfa34d550d9af194389e13471dfbc55049a1ab66db112fbf1343'], - }), - ('asnipe', '1.1.12', { - 'checksums': ['3a1f166f1c71b5877a2acca1384ec6c9b430b67af67ef26125f2abbb53c66206'], - }), - ('oddsratio', '2.0.0', { - 'checksums': ['89bf3c68a6ded6a98f4ee8d487c29605ad00ac5f8db9b8bf1a52144e65332553'], - }), - ('mltools', '0.3.5', { - 'checksums': ['7093ffceccdf5d4c3f045d8c8143deaa8ab79935cc6d5463973ffc7d3812bb10'], - }), - ('h2o', '3.30.0.1', { - 'checksums': ['cb11ef58a7d7dfd3a9193686ddd9c8a9f988b33a69656d8b3e8f59082068b0f5'], - }), - ('mlegp', '3.1.7', { - 'checksums': ['d4845eaf9260f8b8112726dd7ceb5c2f5ce75125fa313191db9de121f2ee15e0'], - }), - ('itertools', '0.1-3', { - 'checksums': ['b69b0781318e175532ad2d4f2840553bade9637e04de215b581704b5635c45d3'], - }), - ('missForest', '1.4', { - 'checksums': ['f785804b03bdf424e1c76095989a803afb3b47d6bebca9a6832074b6326c0278'], - }), - ('bartMachineJARs', '1.1', { - 'checksums': ['f2c31cb94d7485174a2519771127a102e35b9fe7f665e27beda3e76a56feeef2'], - }), - ('bartMachine', '1.2.4.2', { - 'checksums': ['28a5f7363325021bd93f9bd060cc48f20c689dae2f2f6f7100faae66d7651f80'], - }), - ('lqa', '1.0-3', { - 'checksums': ['3889675dc4c8cbafeefe118f4f20c3bd3789d4875bb725933571f9991a133990'], - }), - ('PresenceAbsence', '1.1.9', { - 'checksums': ['1a30b0a4317ea227d674ac873ab94f87f8326490304e5b08ad58953cdf23169f'], - }), - ('GUTS', '1.1.1', { - 'checksums': ['094b8f51719cc36ddc56e3412dbb146eafc93c5e8fbb2c5999c2e80ea7a7d216'], - }), - ('GenSA', '1.1.7', { - 'checksums': ['9d99d3d0a4b7770c3c3a6de44206811272d78ab94481713a8c369f7d6ae7b80f'], - }), - ('parsedate', '1.2.0', { - 'checksums': ['39ab3c507cb3efcd677c6cf453f46d6b1948662bd70c7765845e755ea1e1633d'], - }), - ('circular', '0.4-93', { - 'checksums': ['76cee2393757390ad91d3db3e5aeb2c2d34c0a46822b7941498571a473417142'], - }), - ('cobs', '1.3-4', { - 'checksums': ['a1c7b77e4ca097349884fd1c0d863d74f9092766131094d603f34d33ab2e3c42'], - }), - ('resample', '0.4', { - 'checksums': ['f0d5f735e1b812612720845d79167a19f713a438fd10a6a3206e667045fd93e5'], - }), - ('MIIVsem', '0.5.4', { - 'checksums': ['de918d6b1820c59a7d4324342ad15444c2370ce1d843397a136c307397ed64b9'], - }), - ('medflex', '0.6-6', { - 'checksums': ['b9d04fb5281d0ea0555ec4f327a0ee951a7f312a3af944578dc175183dc49211'], - }), - ('Rserve', '1.7-3.1', { - 'checksums': ['3ba1e919706e16a8632def5f45d666b6e44eafa6c14b57064d6ddf3415038f99'], - }), - ('spls', '2.2-3', { - 'checksums': 
['bbd693da80487eef2939c37aba199f6d811ec289828c763d9416a05fa202ab2e'], - }), - ('Boruta', '6.0.0', { - 'checksums': ['1c9a7aabe09f040e147f6c614f5fe1d0b951d3b0f0024161fbb4c31da8fae8de'], - }), - ('dr', '3.0.10', { - 'checksums': ['ce523c1bdb62a9dda30afc12b1dd96975cc34695c61913012236f3b80e24bf36'], - }), - ('CovSel', '1.2.1', { - 'checksums': ['b375d00cc567e125ff106b4357654f43bba3abcadeed2238b6dea4b7a68fda09'], - }), - ('tmle', '1.4.0.1', { - 'checksums': ['075e7b7fe0496e02785eb35aed0db84476db756c6f14a0047808af2565b33501'], - }), - ('ctmle', '0.1.2', { - 'checksums': ['e3fa0722cd87aa0e0b209c2dddf3fc44c6d09993f1e66a6c43285fe950948161'], - }), - ('BayesPen', '1.0', { - 'checksums': ['772df9ae12cd8a3da1d5b7d1f1629602c7693f0eb03945784df2809e2bb061b0'], - }), - ('inline', '0.3.15', { - 'checksums': ['ff043fe13c1991a3b285bed256ff4a9c0ba10bee764225a34b285875b7d69c68'], - }), - ('BMA', '3.18.12', { - 'checksums': ['cbabb77d92b09a11a986ad03950322e78dff743f82ff67fda51d32e44135dd79'], - }), - ('BCEE', '1.3.0', { - 'checksums': ['82afc9b8c6d617f5f728341960ae32922194f637c550916b3bea12c231414fa7'], - }), - ('bacr', '1.0.1', { - 'checksums': ['c847272e2c03fd08ed79b3b739f57fe881af77404b6fd087caa0c398c90ef993'], - }), - ('clue', '0.3-57', { - 'checksums': ['6e369d07b464a9624209a06b5078bf988f01f7963076e946649d76aea0622d17'], - }), - ('bdsmatrix', '1.3-4', { - 'checksums': ['251e21f433a016ec85e478811ea3ad34c572eb26137447f48d1bbf3cc8bb06ea'], - }), - ('fftwtools', '0.9-8', { - 'checksums': ['4641c8cd70938c2a8bde0b6da6cf7f83e96175ef52f1ca42ec3920a1dabf1bdb'], - }), - ('imagerExtra', '1.3.2', { - 'checksums': ['0ebfa1eabb89459d774630ab73c7a97a93b9481ea5afc55482975475acebd5b8'], - }), - ('MALDIquant', '1.19.3', { - 'checksums': ['a730327c1f8d053d29e558636736b7b66d0671a009e0004720b869d2c76ff32c'], - }), - ('threejs', '0.3.3', { - 'checksums': ['76c759c8b20fb34f4f7a01cbd1b961296e1f19f4df6dded69aae7f1bca80219c'], - }), - ('LaplacesDemon', '16.1.4', { - 'checksums': ['4152a1c3c652979e97870e5c50c45a243d0ad8d4ff968091160e3d66509f61db'], - }), - ('rda', '1.0.2-2.1', { - 'checksums': ['eea3a51a2e132a023146bfbc0c384f5373eb3ea2b61743d7658be86a5b04949e'], - }), - ('sampling', '2.8', { - 'checksums': ['356923f35971bb55f7e97b178aede3366374aa3ad3d24a97be765660553bf21a'], - }), - ('lda', '1.4.2', { - 'checksums': ['5606a1e1bc24706988853528023f7a004c725791ae1a7309f1aea2fc6681240f'], - }), - ('jiebaRD', '0.1', { - 'checksums': ['045ee670f5378fe325a45b40fd55136b355cbb225e088cb229f512c51abb4df1'], - }), - ('jiebaR', '0.11', { - 'checksums': ['adde8b0b21c01ec344735d49cd33929511086719c99f8e10dce4ca9479276623'], - }), - ('hdm', '0.3.1', { - 'checksums': ['ba087565e9e0a8ea30a6095919141895fd76b7f3c05a03e60e9e24e602732bce'], - }), - ('abe', '3.0.1', { - 'checksums': ['66d2e9ac78ba64b7d27b22b647fc00378ea832f868e51c18df50d6fffb8029b8'], - }), - ('SignifReg', '3.0', { - 'checksums': ['ada4e1f8cbb08ba8ff16275ec5f9a453857e0cab63b70d42753989ab4c716b7b'], - }), - ('bbmle', '1.0.23.1', { - 'checksums': ['60421eb01190b741ab14885eaf1088f51d49dcf70e58c42b360489bca04e745c'], - }), - ('emdbook', '1.3.12', { - 'checksums': ['0646caf9e15aaa61ff917a4b5fdf82c06ac17ef221a61dec3fbb554e7bff4353'], - }), - ('SOAR', '0.99-11', { - 'checksums': ['d5a0fba3664087308ce5295a1d57d10bad149eb9771b4fe67478deae4b7f68d8'], - }), - ('rasterVis', '0.47', { - 'checksums': ['123ebe870895c2ba3a4b64d8a18bccab5287c831fa14bb0fe07f0d7de61e51d3'], - }), - ('tictoc', '1.0', { - 'checksums': ['47da097c1822caa2d8e262381987cfa556ad901131eb96109752742526b2e2fe'], - }), - ('ISOcodes', 
'2020.03.16', { - 'checksums': ['160eb4ea23be53305e4e728002c8f5d3852d89155f538deccb734e7c8ad4e1c3'], - }), - ('stopwords', '2.0', { - 'checksums': ['5cca60ce9f44406486e0dca2e36cec2488096c3558b45fc3bd0e7b6d1500af94'], - }), - ('janeaustenr', '0.1.5', { - 'checksums': ['992f6673653daf7010fe176993a01cd4127d9a88be428da8da7a28241826d6f3'], - }), - ('SnowballC', '0.7.0', { - 'checksums': ['b10fee9d322f567a22c580b49b5d4ba1c86eae40a71794ca92552c726b3895f3'], - }), - ('tokenizers', '0.2.1', { - 'checksums': ['28617cdc5ddef5276abfe14a2642999833322b6c34697de1d4e9d6dc7670dd00'], - }), - ('hunspell', '3.0', { - 'checksums': ['01fb9c87f7cf094aaad3b7098378134f2e503286224351e91d08c00b6ee19857'], - }), - ('topicmodels', '0.2-11', { - 'checksums': ['9c26b4d967be6ec26834a39f04aa92b059ea9503eb70c700e1c0a7a43637b74a'], - }), - ('tidytext', '0.2.4', { - 'checksums': ['46ff59063b6a519c9eb606ae135ef31d7073ac729e4a912c9f77e234801b933d'], - }), - ('splitstackshape', '1.4.8', { - 'checksums': ['656032c3f1e3dd5b8a3ee19ffcae617e07104c0e342fc3da4d863637a770fe56'], - }), - ('grImport2', '0.2-0', { - 'checksums': ['a102a2d877e42cd4e4e346e5510a77b2f3e57b43ae3c6d5c272fdceb506b00a7'], - }), - ('preseqR', '4.0.0', { - 'checksums': ['0143db473fb9a811f9cf582a348226a5763e62d9857ce3ef4ec41412abb559bc'], - }), - ('idr', '1.2', { - 'checksums': ['8bbfdf82c8c2b5c73eb079127e198b6cb65c437bb36729f502c7bcd6037fdb16'], - }), - ('entropy', '1.2.1', { - 'checksums': ['edb27144b8f855f1ef21de6b93b6b6c5cf7d4f2c3d592bf625e5158c02226f83'], - }), - ('kedd', '1.0.3', { - 'checksums': ['38760abd8c8e8f69ad85ca7992803060acc44ce68358de1763bd2415fdf83c9f'], - }), - ('HiddenMarkov', '1.8-11', { - 'checksums': ['4a1614249eee9f428bc182ea9ced443dff4eafa7babf4259c720e5b4da2d08fa'], - }), - ('lmerTest', '3.1-2', { - 'checksums': ['385870873fd303c2caa4ac43e2df0ca5aa36ddb484bfb4eefbc5c4ac4bef6de2'], - }), - ('loo', '2.2.0', { - 'checksums': ['466df60953a89fcb135b32909197c3ff26ecea719c191667faa5747324fb01c3'], - }), - ('StanHeaders', '2.21.0-1', { - 'checksums': ['4e94148af2960f203b208c2b725d8be628ca282c7a1e967a7e6e78aa5eb90d3f'], - }), - ('rstan', '2.19.3', { - 'checksums': ['d7025dccdc2337fd0cf3b9689c3e0a07558a47a08b7cbc370e0b3998256f1689'], - }), - ('Rborist', '0.2-3', { - 'checksums': ['f3b3f953ca99e0d17425ac6ba9a7b1e9d6098343abace575cdb492bca2a9c461'], - }), - ('VSURF', '1.1.0', { - 'checksums': ['eee99e0c441795c2ccb21cc6e0a37b24f580241e494c83e811b726b43469eeab'], - }), - ('mRMRe', '2.1.0', { - 'checksums': ['fe23c5c1e7b5b653e0358e98f25ebd8c0c74c6c871606d1b24cd02a5534181d4'], - }), - ('dHSIC', '2.1', { - 'checksums': ['94c86473790cf69f11c68ed8ba9d6ae98218c7c69b7a9a093f235d175cf83db0'], - }), - ('ggsci', '2.9', { - 'checksums': ['4af14e6f3657134c115d5ac5e65a2ed74596f9a8437c03255447cd959fe9e33c'], - }), - ('ggsignif', '0.6.0', { - 'checksums': ['6fe13efda31386483e64d466ba2f5a53a2a235ae04f5c17bba3ccc63d283499e'], - }), - ('corrplot', '0.84', { - 'checksums': ['0dce5e628ead9045580a191f60c58fd7c75b4bbfaaa3307678fc9ed550c303cc'], - }), - ('rstatix', '0.5.0', { - 'checksums': ['06b2ba1b16698c93203474eb67e59169fa4127283d46cf694e032a5472956b46'], - }), - ('ggfan', '0.1.3', { - 'checksums': ['5c888b203ecf5e3dc7a317a790ca059c733002fbca4b4bc1a4f62b7ded5f70dc'], - }), - ('ggpubr', '0.3.0', { - 'checksums': ['b82ffc6bd5974c3036d08393fa1f2bafeaf6f567e0b3faf43e38226b19399eb6'], - }), - ('yaImpute', '1.0-32', { - 'checksums': ['08eee5d851b80aad9c7c80f9531aadd50d60e4b16b3a80657a50212269cd73ff'], - }), - ('intrinsicDimension', '1.2.0', { - 'checksums': 
['6cc9180a83aa0d123f1e420136bb959c0d5877867fa170b79536f5ee22106a32'], - }), - ('patchwork', '1.0.0', { - 'checksums': ['8bfb59b91775781848f39eedcaaaf92c147e2637f384085fcdd41fc8355b3c63'], - }), - ('leiden', '0.3.3', { - 'checksums': ['c2b5e1c061c8bbea494639a0d7f0fa22f5b41ff5fd911409e5f832f3575d06c2'], - }), - ('future.apply', '1.4.0', { - 'checksums': ['737e5a8e2d0ce2678835f5be15c96d491f690d307662ed6719be41937633f8cd'], - }), - ('sctransform', '0.2.1', { - 'checksums': ['d6430a81a66c93da770b1a7c55344df42187321038b4eee80b7066cdd8a7631f'], - }), - ('packrat', '0.5.0', { - 'checksums': ['d6a09290fbe037a6c740921c5dcd70b500e5b36e4713eae4010adf0c456bc5f7'], - }), - ('shinyjs', '1.1', { - 'checksums': ['8986181baa68fb2863eea65b9df1b04b9b4e1293685298531d42de3bc2f06892'], - }), - ('colourpicker', '1.0', { - 'checksums': ['f1dacbafb05c09f61b9bdd0fdcee5344409759b042a71ec46d7c9e3710107b7c'], - }), - ('ggExtra', '0.9', { - 'checksums': ['f22db92d6e3e610901998348acbcaa6652fa6c62a285a622d3b962ba9e89aba2'], - }), - ('findpython', '1.0.5', { - 'checksums': ['3e9a21988cb78833769b02680d128a0cc01bcb41aa9c9725ab1742f349759145'], - }), - ('argparse', '2.0.1', { - 'checksums': ['949843920d14fc7c162aedab331a936499541736e7dafbb103fbfd79be8147ab'], - }), - ('intergraph', '2.0-2', { - 'checksums': ['6cbe77f1e87fa1c110db2d46010f2f3ae72bfdb708ce2ca84c1cdc2cd6eb47a1'], - }), - ('ggnetwork', '0.5.8', { - 'checksums': ['a8c7c19a2bafce898c95d0b2401ef052925db57b85058c7203f0122b3af7bbbd'], - }), - ('qqman', '0.1.4', { - 'checksums': ['3ad01f82132bf75960ae0d8a81cae84eaf4a9ab262f183fc3d6439189e4a3aed'], - }), - ('rstantools', '2.0.0', { - 'checksums': ['d200a4d8c62d8577fdba819bf770e7abb11c57c3332f4498e1d30ce824598b3a'], - }), - ('bayesplot', '1.7.1', { - 'checksums': ['820ca9ca3258fc68333e75fd60898c0d0f08f513b66c161ca6159a54ad54006b'], - }), - ('dygraphs', '1.1.1.6', { - 'checksums': ['c3d331f30012e721a048e04639f60ea738cd7e54e4f930ac9849b95f0f005208'], - }), - ('rsconnect', '0.8.16', { - 'checksums': ['3f728c6a5153dca28f69b9355ae9d18c5f7e70d12495c0c047eef673c1053116'], - }), - ('shinystan', '2.5.0', { - 'checksums': ['45f9c552a31035c5de8658bb9e5d72da7ec1f88fbddb520d15fe701c677154a1'], - }), - ('brms', '2.12.0', { - 'checksums': ['fa21505dca65d027f1cf1c573258de5f3c51ca8b94abd6dcf9123a3a27a72999'], - }), - ('drgee', '1.1.10', { - 'checksums': ['e684f07f7dfec922380d4202922c11094f859721f77b31ff38b0d35d0f42c743'], - }), - ('stdReg', '3.4.0', { - 'checksums': ['b423df43e9c4bb8ffafe2de88b93fdcf3a90f964f136580ea1b849a83dba7400'], - }), - ('mcmcse', '1.4-1', { - 'checksums': ['6b181f56d60ddf55c9c08a2468ef9ffe3ec8a1b16cfa9a7742c3872597f85d17'], - }), - ('copCAR', '2.0-3', { - 'checksums': ['e626380d3f11ca6c756381f2423ef3661efb52667147114253416cc6151a71b8'], - }), - ('batchmeans', '1.0-4', { - 'checksums': ['8694573009d9070a76007281407d3314da78902e122a9d8aec1f819d3bbe562c'], - }), - ('ngspatial', '1.2-2', { - 'checksums': ['3fa79e45d3a502a58c1454593ec83dfc73144e92b34c14f617a6126557dd0d26'], - }), - ('BIGL', '1.4.3', { - 'checksums': ['7ef7edd5c1852409f1da176e810a9a2a7f0b9441e0e6459d4d16179f3eb000f4'], - }), - ('drugCombo', '1.1.1', { - 'checksums': ['9fdc3a7cf63552c32f1c7573258fc4ceacdaf5c475fe79aa4ca8c9226b9f8a38'], - }), - ('betareg', '3.1-3', { - 'checksums': ['cc19387ec516492d11cf59cdfa07e1733950a2af8196c1e155bc95939bc76246'], - }), - ('unmarked', '1.0.1', { - 'checksums': ['a3bb9bdc7a4a79ea38482df3f8cbb6e9082332a0d894eeb4b3dc816344cec0e4'], - }), - ('maxlike', '0.1-8', { - 'checksums': 
['90aaab9602f259cbfae61fe96e105cc4a0c2a385b42380f85c14f5d544107251'], - }), - ('coxme', '2.2-16', { - 'checksums': ['a0ce4b5649c4c1abbfe2c2bf23089744d1f66eb8368dea16e74e090f366a5111'], - }), - ('AICcmodavg', '2.3-0', { - 'checksums': ['4d6bcff3c549be9dcefdd849b239659618fdaf9ba0d27b9d0589620d104e5e24'], - }), - ('pacman', '0.5.1', { - 'checksums': ['9ec9a72a15eda5b8f727adc877a07c4b36f8372fe7ed80a1bc6c2068dab3ef7c'], - }), - ('spaa', '0.2.2', { - 'checksums': ['a5a54454d4a7af473ce797875f849bd893005cb04325bf3e0dbddb19fe8d7198'], - }), - ('maxnet', '0.1.2', { - 'checksums': ['dfa02ca1031b369415d8b16863ca5fd115c7bf96b1f8fc24f91719b017f5cce5'], - }), - ('ENMeval', '0.3.0', { - 'checksums': ['1c924098a27c82a9bf75408173b47429c40cc41cae3aba18c86ba217bb45ba60'], - }), - ('plotmo', '3.5.7', { - 'checksums': ['aa366a49a63cabfd5d799a1524e23a4faed022f10ee60f8407d70ab70731b38c'], - }), - ('earth', '5.1.2', { - 'checksums': ['326f98e8c29365ca3cd5584cf2bd6529358f5ef81664cbd494162f92b6c3488d'], - }), - ('mda', '0.5-2', { - 'checksums': ['344f2053215ddf535d1554b4539e9b09067dac878887cc3eb995cef421fc00c3'], - }), - ('biomod2', '3.4.6', { - 'checksums': ['41fd7745f4d0af3f799e9cf4fa5484a427de6854db84c6476fde7a7414787d5b'], - }), - ('ncdf4', '1.17', { - 'checksums': ['db95c4729d3187d1a56dfd019958216f442be6221bd15e23cd597e6129219af6'], - }), - ('mapdata', '2.3.0', { - 'checksums': ['1edc180990387b6b1cd4e43a9505ebeb98e6115e4205c4f32f05b397c781dd76'], - }), - ('wavelets', '0.3-0.2', { - 'checksums': ['22d1d6bff71574a37b047ee5c31d17d52e321d2dd54db1a221f2d7267536e01c'], - }), - ('biwavelet', '0.20.19', { - 'checksums': ['2b9f99e4f56cee87ee2db53abcece205ff592887a9c9cf298c875a1ea594b25d'], - }), - ('wavethresh', '4.6.8', { - 'checksums': ['93b369c6eabcc0187b860b31d84a9d7c72c4a2ed5b23c1520c93bd7bea22e7db'], - }), - ('splancs', '2.01-40', { - 'checksums': ['79744381ebc4a361740a36dca3c9fca9ae015cfe0bd585b7856a664a3da74363'], - }), - ('RandomFieldsUtils', '0.5.3', { - 'checksums': ['ea823cba2e254a9f534efb4b772c0aeef2039ee9ef99744e077b969a87f8031d'], - }), - ('RandomFields', '3.3.8', { - 'checksums': ['8a08e2fdae428e354a29fb6818ae781cc56235a6849a0d29574dc756f73199d0'], - }), - ('geoR', '1.8-1', { - 'checksums': ['990647804590b925a50f72897b24bbabd331cebef0be1696a60528b2f79d6fd3'], - }), - ('intervals', '0.15.2', { - 'checksums': ['0bd23b0ce817ddd851238233d8a5420bf3a6d29e75fd361418cbc50118777c57'], - }), - ('spacetime', '1.2-3', { - 'checksums': ['ca7c0b962d5da0741f6dd85b271d693598756e0eeeb364ada828dbb6d1b9b25b'], - }), - ('gstat', '2.0-6', { - 'checksums': ['6711e68aa2444cf2927879a03a976d8caeca5eac98d806b19a6a7178b90bfcab'], - }), - ('rgeos', '0.5-3', { - 'checksums': ['357454e110ae19a665d5af5ffd7d670d2d7471566dd638dc614365c29b68600b'], - }), - ('repr', '1.1.0', { - 'checksums': ['743fe018f9e3e54067a970bc38b6b8c0c0498b43f88d179ac4a959c2013a5f96'], - }), - ('IRdisplay', '0.7.0', { - 'checksums': ['91eac9acdb92ed0fdc58e5da284aa4bb957ada5eef504fd89bec136747999089'], - }), - ('pbdZMQ', '0.3-3.1', { - 'checksums': ['9e034745cd9c1bdf510a2ec8e7060501abc92ec86020c430ed7e36f5d5cf1523'], - }), - ('IRkernel', '1.1.1', { - 'checksums': ['f5a129168f44bdda6da8cc907189a2737f692d427529515d87312a17dbd806f8'], - }), - # language server support - ('collections', '0.3.5', { - 'checksums': ['bf76ab5c6a8082b6bb70b9bf3bdb30658e823e3b7b28cf7be7e8a87d117a7114'], - }), - ('xmlparsedata', '1.0.4', { - 'checksums': ['387b13c25bea9ddc0a39b817c17c199b86ab9acafa328daae2233a9ca577fb9c'], - }), - ('cyclocomp', '1.1.0', { - 'checksums': 
['cdbf65f87bccac53c1527a2f1269ec7840820c18503a7bb854910b30b71e7e3e'],
-    }),
-    ('lintr', '2.0.1', {
-        'checksums': ['fe0723757b653ef83ec7a5005d0a7524cd917d646d35a5627ee639158881ce93'],
-    }),
-    ('styler', '1.3.2', {
-        'checksums': ['3fcf574382c607c2147479bad4f9fa8b823f54fb1462d19ec4a330e135a44ff1'],
-    }),
-    ('mockery', '0.4.2', {
-        'checksums': ['988e249c366ee7faf277de004084cf5ca24b5c8a8c6e3842f1b1362ce2f7ea9b'],
-    }),
-    ('languageserver', '0.3.8', {
-        'checksums': ['371db6976d6066d654c9d31f911dba667c1f8ceb4ab67da34d44037b66f3ca9b'],
-    }),
-]
-
-moduleclass = 'lang'
diff --git a/Golden_Repo/r/R/Rmpi_0_6_9_psmpi_2020.patch b/Golden_Repo/r/R/Rmpi_0_6_9_psmpi_2020.patch
deleted file mode 100644
index 37f693aa51910c62d291ac379f7c584e67be2bbf..0000000000000000000000000000000000000000
--- a/Golden_Repo/r/R/Rmpi_0_6_9_psmpi_2020.patch
+++ /dev/null
@@ -1,8 +0,0 @@
---- Rmpi/src/Makevars.orig 2020-08-12 16:00:24.750464585 +0200
-+++ Rmpi/src/Makevars.in 2020-08-12 16:00:42.941024431 +0200
-@@ -2,4 +2,4 @@
- 
- PKG_CPPFLAGS = @DEFS@ @PKG_CPPFLAGS@
- PKG_CFLAGS = $(ARCHCFLAGS)
--PKG_LIBS = @PKG_LIBS@ $(ARCHLIB)
-+PKG_LIBS = @PKG_LIBS@ -lmpifort $(ARCHLIB)
diff --git a/Golden_Repo/r/RELION/RELION-3.1.0-gpsmpi-2020.eb b/Golden_Repo/r/RELION/RELION-3.1.0-gpsmpi-2020.eb
deleted file mode 100644
index 44c246aa7ee24523a7d82844a9834d7eceb0b0ab..0000000000000000000000000000000000000000
--- a/Golden_Repo/r/RELION/RELION-3.1.0-gpsmpi-2020.eb
+++ /dev/null
@@ -1,69 +0,0 @@
-easyblock = 'CMakeMake'
-
-name = 'RELION'
-version = "3.1.0"
-
-homepage = 'http://www2.mrc-lmb.cam.ac.uk/relion/index.php/Main_Page'
-description = """RELION (for REgularised LIkelihood OptimisatioN, pronounce
-rely-on) is a stand-alone computer program that employs an empirical Bayesian
-approach to refinement of (multiple) 3D reconstructions or 2D class averages in
-electron cryo-microscopy (cryo-EM). """
-
-site_contacts = 'a.strube@fz-juelich.de'
-
-toolchain = {'name': 'gpsmpi', 'version': '2020'}
-
-source_urls = ['https://github.com/3dem/relion/archive']
-sources = ['%(version)s.tar.gz']
-patches = [
-    ('relion_ssub.sh', 'scripts')
-]
-checksums = [
-    'fa5ebef27a15f1df856a64dc8960e7c381ff646818f778fe746249d0b7682ba2',  # 3.1.0.tar.gz
-    '48a73111aaec4719af83b9e6cd81014bb12fc8f86953608084becb859ec4be57',  # relion_ssub.sh
-]
-
-builddependencies = [
-    ('CMake', '3.18.0'),
-]
-
-dependencies = [
-    ('CUDA', '11.0', '', SYSTEM),
-    ('FFTW', '3.3.8'),  # RELION uses the threaded libraries from here
-    ('FLTK', '1.3.5'),
-    ('LibTIFF', '4.1.0'),
-    ('X11', '20200222'),
-    ('zlib', '1.2.11'),
-    ('libpng', '1.6.37'),
-    ('freetype', '2.10.1'),
-    ('fontconfig', '2.13.92'),
-]
-
-# Note RELION automatically picks up the threaded fftw3 libs
-preconfigopts = 'export LDFLAGS="-lXft -lfontconfig -lXext -lXinerama '
-preconfigopts += ' -lXcursor -lXfixes -ldl -lpthread -lXrender $LDFLAGS" && '
-
-configopts = '-DCUDA_ARCH=80\ -gencode=arch=compute_70,code=sm_70 -DCUDA_SDK_ROOT_DIR=$EBROOTCUDA '
-configopts += '-DFLTK_DIR=$EBROOTFLTK -DX11_INCLUDES=$EBROOTX11/include'
-
-
-# users expect the maingui binary to be called relion
-postinstallcmds = [
-    "cp %(installdir)s/bin/relion_maingui %(installdir)s/bin/%(namelower)s",
-    "cp %(builddir)s/*/scripts/relion_ssub.sh %(installdir)s/bin"
-]
-
-modextravars = {
-    "RELION_QSUB_EXTRA1": "Max number of hours running",
-    "RELION_QSUB_EXTRA1_DEFAULT": "1",
-    "RELION_QSUB_EXTRA2": "Number of tasks per node (max 48)",
-    "RELION_QSUB_EXTRA2_DEFAULT": "24",
-    "RELION_QSUB_TEMPLATE": "%(installdir)s/bin/relion_ssub.sh"
-}
-
-sanity_check_paths = {
-    'files': ['bin/relion_maingui'],
-    'dirs': []
-}
-
-moduleclass = 'bio'
diff --git a/Golden_Repo/r/RELION/relion_ssub.sh b/Golden_Repo/r/RELION/relion_ssub.sh
deleted file mode 100644
index bc549c71ea6be731216ad3084b35c5f04afd6ccc..0000000000000000000000000000000000000000
--- a/Golden_Repo/r/RELION/relion_ssub.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash -x
-#SBATCH --ntasks=XXXmpinodesXXX
-#SBATCH --ntasks-per-node=XXXextra2XXX
-#SBATCH --cpus-per-task=XXXthreadsXXX
-#SBATCH --time=XXXextra1XXX:00:00
-#SBATCH --partition=XXXqueueXXX
-#SBATCH --output=XXXoutfileXXX
-#SBATCH --error=XXXerrfileXXX
-
-export OMP_NUM_THREADS=${SLURM_CPUS_PER_TASK}
-srun XXXcommandXXX
diff --git a/Golden_Repo/r/Ruby/Ruby-2.7.1-GCCcore-9.3.0.eb b/Golden_Repo/r/Ruby/Ruby-2.7.1-GCCcore-9.3.0.eb
deleted file mode 100644
index 880d2f48828bdbf45bf6398ebdf0f267504838f1..0000000000000000000000000000000000000000
--- a/Golden_Repo/r/Ruby/Ruby-2.7.1-GCCcore-9.3.0.eb
+++ /dev/null
@@ -1,186 +0,0 @@
-name = 'Ruby'
-version = '2.7.1'
-
-homepage = 'https://www.ruby-lang.org'
-description = """Ruby is a dynamic, open source programming language with
- a focus on simplicity and productivity. It has an elegant syntax that is
- natural to read and easy to write."""
-
-site_contacts = 'j.goebbert@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
-
-source_urls = ['https://cache.ruby-lang.org/pub/ruby/%(version_major_minor)s']
-sources = [SOURCELOWER_TAR_GZ]
-checksums = ['d418483bdd0000576c1370571121a6eb24582116db0b7bb2005e90e250eae418']
-
-builddependencies = [('binutils', '2.34')]
-
-dependencies = [
-    ('ZeroMQ', '4.3.3')
-]
-
-exts_default_options = {
-    'source_urls': ['https://rubygems.org/downloads/'],
-    'source_tmpl': '%(name)s-%(version)s.gem',
-}
-
-# !! order of packages is important !!
-# some packages have dependencies with minimum and maximum version requirements -# each version is picked as high as possible to fullfill all requirements -# packages updated on 8 April 2020 -exts_list = [ - ('ffi', '1.12.2', { - 'checksums': ['048ad01d5369f67075f943c16f1058f10663af2a66eedb87d921316ba1828e82'], - }), - ('childprocess', '3.0.0', { - 'checksums': ['4579a87cdc962de252eebf1482a4185fad383ae7dbe29a746ba2be8e261280c5'], - }), - ('json', '2.2.0', { - 'checksums': ['9dd1437156773f72c096058ec837faac1b00077121a3fd574e68f895ea3aa96b'], - }), - ('cabin', '0.9.0', { - 'checksums': ['91c5394289e993e7037a6c869e3f212f31a5984d2b1811ac934f591c87446b2c'], - }), - ('backports', '3.17.0', { - 'checksums': ['bb18a4c7a2a13828d18e348ea81183554adcaac4fc9db0ecd1f3d1dfbd7fdc8f'], - }), - ('arr-pm', '0.0.10', { - 'checksums': ['0fa2d425a84eeeaf8f95268568e826723b41aeefa14511d1f0ed1368c30bdd1f'], - }), - ('clamp', '1.3.1', { - 'checksums': ['0975e919dc76cfb1a63264fec5c2b97c4bba54cbdec7894ab9e77f0e87436a36'], - }), - ('multipart-post', '1.2.0', { - 'checksums': ['ccf26a61706f128a2a3f3745aba0f79fde7b6fee068e03490eb189c2aaa5e78a'], - }), - ('faraday', '0.17.3', { - 'checksums': ['c27a8fd7614f1abe9889d8b797246a8e0e5aa6e9d7f1e34ba05a445bf5844f8d'], - }), - ('faraday_middleware', '0.14.0', { - 'checksums': ['4cb37ddd656b2c4de0bd684b72b08c34486f70560c31cb303cd506faef7ef2f4'], - }), - ('highline', '2.0.3', { - 'checksums': ['2ddd5c127d4692721486f91737307236fe005352d12a4202e26c48614f719479'], - }), - ('net-http-pipeline', '1.0.1', { - 'checksums': ['6923ce2f28bfde589a9f385e999395eead48ccfe4376d4a85d9a77e8c7f0b22f'], - }), - ('connection_pool', '2.2.2', { - 'checksums': ['c8cc9446bcc51034103c1259ad70b91dc9f5297d13460b2c0cce7e5a93e8d451'], - }), - ('net-http-persistent', '2.9.4', { - 'checksums': ['24274d207ffe66222ef70c78a052c7ea6e66b4ff21e2e8a99e3335d095822ef9'], - }), - ('multi_json', '1.14.1', { - 'checksums': ['d971296c0eacea289d31e4a7ab7ac5eda97262c62bbc8c110de4f5e36425c577'], - }), - ('public_suffix', '4.0.3', { - 'checksums': ['87a9b64575e6d04a2e83882a2610470ea47132828c96650610b4c511b4c1d3b0'], - }), - ('addressable', '2.7.0', { - 'checksums': ['5e9b62fe1239091ea9b2893cd00ffe1bcbdd9371f4e1d35fac595c98c5856cbb'], - }), - ('concurrent-ruby', '1.1.6', { - 'checksums': ['14da21d5cfe9ccb02e9359b01cb7291e0167ded0ec805d4f3a4b2b4ffa418324'], - }), - ('i18n', '1.8.2', { - 'checksums': ['95cf147081cc6f1dbfb32a8f29a03afe8b0f4da6a300d37d34e0f74a6868994b'], - }), - ('minitest', '5.14.0', { - 'checksums': ['dfe35170edd195c3f32b43c2326a776e687f9efb330f185e43f0ca0a8be9e33c'], - }), - ('thread_safe', '0.3.6', { - 'checksums': ['9ed7072821b51c57e8d6b7011a8e282e25aeea3a4065eab326e43f66f063b05a'], - }), - ('tzinfo', '1.2.7', { - 'checksums': ['3945d8a57c62a59e691d527ae4daaf562d6e07a3c0d032876c6b066e108072c4'], - }), - ('activesupport', '5.2.4.2', { - 'checksums': ['8c3ae3df5b08b49b6b5d9c5028da1a1e582f1243b7362dbb9736f65ede492378'], - }), - ('gh', '0.16.0', { - 'checksums': ['e14197ffd0eb0a6379c61c41987f12a77977c4d6e8402cc21977e523b2f08caa'], - }), - ('launchy', '2.5.0', { - 'checksums': ['954243c4255920982ce682f89a42e76372dba94770bf09c23a523e204bdebef5'], - }), - ('ethon', '0.12.0', { - 'checksums': ['e99d3095e89f82c5a7e63d9261ddf4a21f28ae5d12a9d3abaa6920cce6cbef3d'], - }), - ('typhoeus', '1.3.1', { - 'checksums': ['257e7163d50bed15e52c3c25bde890ea3ad854f3bd2e3fd16ce0b216c342d132'], - }), - ('websocket', '1.2.8', { - 'checksums': ['1d8155c1cdaab8e8e72587a60e08423c9dd84ee44e4e827358ce3d4c2ccb2138'], - }), - ('pusher-client', 
'0.6.2', { - 'checksums': ['c405c931090e126c056d99f6b69a01b1bcb6cbfdde02389c93e7d547c6efd5a3'], - }), - ('diff-lcs', '1.3', { - 'checksums': ['ea7bf591567e391ef262a7c29edaf87c6205204afb5bb39dfa8f08f2e51282a3'], - }), - ('rspec-support', '3.9.2', { - 'checksums': ['83c15ff18b4ff540872c8a6535bae14b2ec76dda42f7cbd4db77086503f697ff'], - }), - ('rspec-mocks', '3.9.1', { - 'checksums': ['b9820ed35afbc552b4448ccb98984ffe20e92bff6760dbd60f4fec503d6e75a7'], - }), - ('rspec-expectations', '3.9.1', { - 'checksums': ['0d7e5c5b026c89de04dabc1f875866fc131fb1c049af00f580e62b7cf0e64b3a'], - }), - ('rspec-core', '3.9.1', { - 'checksums': ['6de3bb5968447225cfd7fe7056a9d80d961f72c021222a6b5c1687251b0fece3'], - }), - ('rspec', '3.9.0', { - 'checksums': ['90a037a7cc02365d7c112201881839aafbc875a88094423bc8cba778c98bfac3'], - }), - ('rack', '2.2.2', { - 'checksums': ['77f51f9a1409e388a7c002612311fc8cef06f70309eba90800dc6a8d884eb782'], - }), - ('rack-protection', '2.0.8.1', { - 'checksums': ['d187dee7708ca93301854127e81b2675b60af86ab53532f9735087ecd649d2ff'], - }), - ('tilt', '2.0.10', { - 'checksums': ['9b664f0e9ae2b500cfa00f9c65c34abc6ff1799cf0034a8c0a0412d520fac866'], - }), - ('ruby2_keywords', '0.0.2', { - 'checksums': ['145c91edd2ef4c509403328ed05ae4387a8841b7a3ae93679e71c0fd3860ec9e'], - }), - ('mustermann', '1.1.1', { - 'checksums': ['0a21cfe505869cce9ce17998db5260344e78df81ae857c07a62143fd30299531'], - }), - ('sinatra', '2.0.8.1', { - 'checksums': ['b8845f060fde0157940172a4d006b757f3ba6a5ea36326c7c9352c25391c3e66'], - }), - ('rack-test', '1.1.0', { - 'checksums': ['154161f40f162b1c009a655b7b0c5de3a3102cc6d7d2e94b64e1f46ace800866'], - }), - ('bundler', '2.1.4', { - 'checksums': ['50014d21d6712079da4d6464de12bb93c278f87c9200d0b60ba99f32c25af489'], - }), - - ('bond', '0.5.1', { - 'checksums': ['20710d17b16609b0dfbde9d828964b5eadb565513dbb68fb13d44f4d988b29e4'], - }), - ('data_uri', '0.1.0', { - 'checksums': ['7eb2f63487ccb943fae0eca561729c48d4d5654d76f8330aa16ed1dcdbebf33b'], - }), - ('mimemagic', '0.3.4', { - 'checksums': ['3751635874b1b0d3ea7daae37be571a5f60cbbbf2977b174d8043a51fe76393b'], - }), - ('multi_json', '1.14.1', { - 'checksums': ['d971296c0eacea289d31e4a7ab7ac5eda97262c62bbc8c110de4f5e36425c577'], - }), - ('ffi-rzmq-core', '1.0.7', { - 'checksums': ['6541625a0f80016e4cb1b22a68b3870bd711c30de7d77625d8d6c92be95eb32a'], - }), - ('ffi-rzmq', '2.0.7', { - 'checksums': ['2feb3bc5bf46df633e2211514ac408521df0c198f54134fdb38322675d9f4591'], - }), - ('iruby', '0.4.0', { - 'checksums': ['51a521a61184cac9fdd2ab6f5a1e41ba2ff4f09684e06ad47ebf2a48825092d3'], - }), -] - -moduleclass = 'lang' diff --git a/Golden_Repo/r/Ruby/Ruby-2.7.2-GCCcore-10.3.0.eb b/Golden_Repo/r/Ruby/Ruby-2.7.2-GCCcore-10.3.0.eb deleted file mode 100644 index 85844c1abf37cfaf6b0ff7563bd9aee7bb851c90..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/Ruby/Ruby-2.7.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,197 +0,0 @@ -name = 'Ruby' -version = '2.7.2' - -homepage = 'https://www.ruby-lang.org' -description = """Ruby is a dynamic, open source programming language with - a focus on simplicity and productivity. 
It has an elegant syntax that is - natural to read and easy to write.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://cache.ruby-lang.org/pub/ruby/%(version_major_minor)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6e5706d0d4ee4e1e2f883db9d768586b4d06567debea353c796ec45e8321c3d4'] - -builddependencies = [('binutils', '2.36.1')] - -exts_default_options = { - 'source_urls': ['https://rubygems.org/downloads/'], - 'source_tmpl': '%(name)s-%(version)s.gem', -} - -# !! order of packages is important !! -# some packages have dependencies with minimum and maximum version requirements -# each version is picked as high as possible to fullfill all requirements -# packages updated on 22 Feb 2021 -exts_list = [ - ('ffi', '1.14.2', { - 'checksums': ['7dce3fe0f047a6f774d141d4667c9d23ee28c8dc72fbef6194dca398808f0f96'], - }), - ('childprocess', '4.0.0', { - 'checksums': ['72bc4c3e63b88d29e5faebb981e809562ba643c1939b46b465294447451b0d22'], - }), - ('json', '2.5.1', { - 'checksums': ['918d8c41dacb7cfdbe0c7bbd6014a5372f0cf1c454ca150e9f4010fe80cc3153'], - }), - ('cabin', '0.9.0', { - 'checksums': ['91c5394289e993e7037a6c869e3f212f31a5984d2b1811ac934f591c87446b2c'], - }), - ('backports', '3.20.2', { - 'checksums': ['3b42e282071c72c6a1e9b3f3a5ff1b2f37e38b78d1adeb8dcffcd9604fa198b2'], - }), - ('arr-pm', '0.0.10', { - 'checksums': ['0fa2d425a84eeeaf8f95268568e826723b41aeefa14511d1f0ed1368c30bdd1f'], - }), - ('clamp', '1.3.2', { - 'checksums': ['4f6a99a8678d51abbf1650263a74d1ac50939edc11986271431d2e03a0d7a022'], - }), - ('multipart-post', '2.1.1', { - 'checksums': ['d2dd7aa957650e0d99e0513cd388401b069f09528441b87d884609c8e94ffcfd'], - }), - ('ruby2_keywords', '0.0.4', { - 'checksums': ['3ae3189c2e1d2f60204dcceedf890ff49dd28979771e2576016a3ee73b668e97'], - }), - ('faraday-net_http', '1.0.1', { - 'checksums': ['3245ce406ebb77b40e17a77bfa66191dda04be2fd4e13a78d8a4305854d328ba'], - }), - ('faraday', '1.3.0', { - 'checksums': ['7f06817ba823ddeabf4dd85ca5e0f63e234527f564a03f09f6c7127951d3bac2'], - }), - ('faraday_middleware', '1.0.0', { - 'checksums': ['19e808539681bbf2e65df30dfbe27bb402bde916a1dceb4c7496dbe8de14334a'], - }), - ('highline', '2.0.3', { - 'checksums': ['2ddd5c127d4692721486f91737307236fe005352d12a4202e26c48614f719479'], - }), - ('net-http-pipeline', '1.0.1', { - 'checksums': ['6923ce2f28bfde589a9f385e999395eead48ccfe4376d4a85d9a77e8c7f0b22f'], - }), - ('connection_pool', '2.2.3', { - 'checksums': ['8e5bf44b6bfa96f5c94a5c30ae2447fce3dbcc0828a855a6c513fdb015a133e2'], - }), - ('net-http-persistent', '2.9.4', { - 'checksums': ['24274d207ffe66222ef70c78a052c7ea6e66b4ff21e2e8a99e3335d095822ef9'], - }), - ('multi_json', '1.15.0', { - 'checksums': ['1fd04138b6e4a90017e8d1b804c039031399866ff3fbabb7822aea367c78615d'], - }), - ('public_suffix', '4.0.6', { - 'checksums': ['a99967c7b2d1d2eb00e1142e60de06a1a6471e82af574b330e9af375e87c0cf7'], - }), - ('addressable', '2.7.0', { - 'checksums': ['5e9b62fe1239091ea9b2893cd00ffe1bcbdd9371f4e1d35fac595c98c5856cbb'], - }), - ('concurrent-ruby', '1.1.8', { - 'checksums': ['e35169e8e01c33cddc9d322e4e793a9bc8c3c00c967d206d17457e0d301f2257'], - }), - ('i18n', '1.8.9', { - 'checksums': ['62a877ff6b5fdb4c20ca614c6bfcd6abd7b426d883069e59ce41a6744758e622'], - }), - ('minitest', '5.14.3', { - 'checksums': ['b2f80910d5ae04615c1f94645882b87cf22c17123c0ebd0e4cd8e8867883f246'], - }), - ('thread_safe', '0.3.6', { - 'checksums': ['9ed7072821b51c57e8d6b7011a8e282e25aeea3a4065eab326e43f66f063b05a'], - }), - 
('tzinfo', '1.2.9', { - 'checksums': ['725b865cc72ac00ef21f1413fe55e01400dfb95f6e7317c45848a3110cc4987f'], - }), - ('zeitwerk', '2.4.2', { - 'checksums': ['f4d6279b0ef0cb35c9e1bddbcecd6b148df82f1cf7b8023ac03d6625f567869c'], - }), - ('activesupport', '5.2.4.5', { - 'checksums': ['8c25759e6c5df1de673e9bfc35e42f4c5e021d47a8dde103a00f17f1467ee43a'], - }), - ('gh', '0.18.0', { - 'checksums': ['eb93f18a88db3ba92eb888610fc53fae731d9dacfe55922b58cc3f3aca776a47'], - }), - ('launchy', '2.5.0', { - 'checksums': ['954243c4255920982ce682f89a42e76372dba94770bf09c23a523e204bdebef5'], - }), - ('ethon', '0.12.0', { - 'checksums': ['e99d3095e89f82c5a7e63d9261ddf4a21f28ae5d12a9d3abaa6920cce6cbef3d'], - }), - ('typhoeus', '1.4.0', { - 'checksums': ['fff9880d5dc35950e7706cf132fd297f377c049101794be1cf01c95567f642d4'], - }), - ('websocket', '1.2.9', { - 'checksums': ['884b12dee993217795bb5f58acc89c0121c88bdc99df4d1636c0505dca352b36'], - }), - ('pusher-client', '0.6.2', { - 'checksums': ['c405c931090e126c056d99f6b69a01b1bcb6cbfdde02389c93e7d547c6efd5a3'], - }), - ('diff-lcs', '1.4.4', { - 'checksums': ['bf3a658875f70c1467fe7a3b302b795f074c84b28db6e4a2bd6b1ad6d12a2255'], - }), - ('rspec-support', '3.10.2', { - 'checksums': ['74315f89069fbaf2a710e2117340373b77ee45eceb026de87e0cad9dd5154596'], - }), - ('rspec-mocks', '3.10.2', { - 'checksums': ['93fc76e312c3d19cacc1cb2eb64bf82731de2e216295cf2b4d0ce31ba77923b4'], - }), - ('rspec-expectations', '3.10.1', { - 'checksums': ['27acf5d5df13f8cc8f7158001ebf572513bcec3d45404ba76e0a8998895ce9eb'], - }), - ('rspec-core', '3.10.1', { - 'checksums': ['ac9abdc9577a3a34e9e92815603da8343931055ab4fba1c2a49de6dd3b749673'], - }), - ('rspec', '3.10.0', { - 'checksums': ['b870b43d49ae4a4e063b94976d2742b0854ec10458c425d569b5556ee5898ab7'], - }), - ('rack', '2.2.3', { - 'checksums': ['2638e7eb6689a5725c7e16f30cc4aa4e31694dc3ca30d790952526781bd0bb44'], - }), - ('rack-protection', '2.1.0', { - 'checksums': ['1f523c16e5b32f139c8f6f1e3b3eb53aaa7a69bc79a30f3e80f8a93c89242a95'], - }), - ('tilt', '2.0.10', { - 'checksums': ['9b664f0e9ae2b500cfa00f9c65c34abc6ff1799cf0034a8c0a0412d520fac866'], - }), - ('mustermann', '1.1.1', { - 'checksums': ['0a21cfe505869cce9ce17998db5260344e78df81ae857c07a62143fd30299531'], - }), - ('sinatra', '2.1.0', { - 'checksums': ['f323e4446f3e2a132dcaaa134f89caddb29dd88370317f4f32faf5797f1ea535'], - }), - ('rack-test', '1.1.0', { - 'checksums': ['154161f40f162b1c009a655b7b0c5de3a3102cc6d7d2e94b64e1f46ace800866'], - }), - ('bundler', '2.2.11', { - 'checksums': ['4ea2e025ced4c8487ab5e25a80978e3973b870f9e1bfaffab2d5d4263537fdc7'], - }), - # for Jupyter kernel - ('mime-types-data', '3.2021.0704', { - 'checksums': ['9ecd5aa031d5483156cd7a4e7d1956881d75f7ed8c08d487900df8580ee39d36'], - }), - ('mime-types', '3.3.1', { - 'checksums': ['708f737e28ceef48b9a1bc041aa9eec46fa36eb36acb95e6b64a9889131541fe'], - }), - ('data_uri', '0.1.0', { - 'checksums': ['7eb2f63487ccb943fae0eca561729c48d4d5654d76f8330aa16ed1dcdbebf33b'], - }), - ('io-console', '0.5.9', { - 'checksums': ['4aaefa9e601433071a92a4761a9f0ceb6507975d31ac75a6bd2a7c043b77aa5e'], - }), - ('reline', '0.2.6', { - 'checksums': ['ea2f3c3209feb17594dbd33242719fa24ca0fd29a63a1b49d2a6035e265a5183'], - }), - ('irb', '1.3.6', { - 'checksums': ['79c73e1cada8aee635a07a0dbb443fc6bfac357903d277476505d8700b949e68'], - }), - ('multi_json', '1.15.0', { - 'checksums': ['1fd04138b6e4a90017e8d1b804c039031399866ff3fbabb7822aea367c78615d'], - }), - ('ffi-rzmq-core', '1.0.7', { - 'checksums': 
['6541625a0f80016e4cb1b22a68b3870bd711c30de7d77625d8d6c92be95eb32a'], - }), - ('ffi-rzmq', '2.0.7', { - 'checksums': ['2feb3bc5bf46df633e2211514ac408521df0c198f54134fdb38322675d9f4591'], - }), - ('iruby', '0.7.3', { - 'checksums': ['04d49dc6024168b67ab29a2458eae3e567756223a1e5eeb285b8ecfaad6e8fc3'], - }), -] - -moduleclass = 'lang' diff --git a/Golden_Repo/r/Rust/Rust-1.47.0-GCCcore-10.3.0.eb b/Golden_Repo/r/Rust/Rust-1.47.0-GCCcore-10.3.0.eb deleted file mode 100644 index 65ef3a50981165948779824dbdf8fe265e875722..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/Rust/Rust-1.47.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Rust' -version = '1.47.0' - -homepage = 'https://www.rust-lang.org' -description = """Rust is a systems programming language that runs blazingly fast, prevents segfaults, - and guarantees thread safety.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://static.rust-lang.org/dist/'] -sources = ['rustc-%(version)s-src.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - ('Python', '3.8.5'), - ('cURL', '7.71.1'), - ('git', '2.28.0'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -configopts = "--enable-extended --sysconfdir=%(installdir)s/etc" - -sanity_check_paths = { - 'files': ['bin/cargo', 'bin/rustc', 'bin/rustdoc'], - 'dirs': ['lib/rustlib', 'share/doc', 'share/man'], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/r/Rust/Rust-1.47.0-GCCcore-9.3.0.eb b/Golden_Repo/r/Rust/Rust-1.47.0-GCCcore-9.3.0.eb deleted file mode 100644 index 8897087c2befe75f9f2c363a18bb49c14b44f5e2..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/Rust/Rust-1.47.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Rust' -version = '1.47.0' - -homepage = 'https://www.rust-lang.org' -description = """Rust is a systems programming language that runs blazingly fast, prevents segfaults, - and guarantees thread safety.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://static.rust-lang.org/dist/'] -sources = ['rustc-%(version)s-src.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - ('Python', '3.8.5'), - ('cURL', '7.71.1'), - ('git', '2.28.0'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -configopts = "--enable-extended --sysconfdir=%(installdir)s/etc" - -sanity_check_paths = { - 'files': ['bin/cargo', 'bin/rustc', 'bin/rustdoc'], - 'dirs': ['lib/rustlib', 'share/doc', 'share/man'], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/r/re2c/re2c-1.3-GCCcore-10.3.0.eb b/Golden_Repo/r/re2c/re2c-1.3-GCCcore-10.3.0.eb deleted file mode 100644 index 2bba391be38a018de725bb7517bd560299b2f2c5..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/re2c/re2c-1.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 're2c' -version = '1.3' - -homepage = 'https://re2c.org/' -description = """re2c is a free and open-source lexer generator for C and C++. Its main goal is generating -fast lexers: at least as fast as their reasonably optimized hand-coded counterparts. 
Instead of using -traditional table-driven approach, re2c encodes the generated finite state automata directly in the form -of conditional jumps and comparisons.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/skvadrik/re2c/releases/download/%(version)s'] -sources = ['%(name)s-%(version)s.tar.xz'] -checksums = ['f37f25ff760e90088e7d03d1232002c2c2672646d5844fdf8e0d51a5cd75a503'] - -builddependencies = [('binutils', '2.36.1')] - -sanity_check_paths = { - 'files': ['bin/re2c'], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/r/re2c/re2c-1.3-GCCcore-9.3.0.eb b/Golden_Repo/r/re2c/re2c-1.3-GCCcore-9.3.0.eb deleted file mode 100644 index 3584f1b4bec25dc86b38fa1fe568378ab6444968..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/re2c/re2c-1.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 're2c' -version = '1.3' - -homepage = 'https://re2c.org/' -description = """re2c is a free and open-source lexer generator for C and C++. Its main goal is generating -fast lexers: at least as fast as their reasonably optimized hand-coded counterparts. Instead of using -traditional table-driven approach, re2c encodes the generated finite state automata directly in the form -of conditional jumps and comparisons.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/skvadrik/re2c/releases/download/%(version)s'] -sources = ['%(name)s-%(version)s.tar.xz'] -checksums = ['f37f25ff760e90088e7d03d1232002c2c2672646d5844fdf8e0d51a5cd75a503'] - -builddependencies = [('binutils', '2.34')] - -sanity_check_paths = { - 'files': ['bin/re2c'], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/r/rencode/rencode-1.0.5-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/r/rencode/rencode-1.0.5-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 1bd91c49a5157435132aa3b446e24dfbd068e6e6..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/rencode/rencode-1.0.5-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'rencode' -version = '1.0.5' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pypi.python.org/pypi/rencode/' -description = """ -The rencode module is similar to bencode from the BitTorrent project. -For complex, heterogeneous data structures with many small elements, -r-encodings take up significantly less space than b-encodings. -This version of rencode is a complete rewrite in Cython to attempt to -increase the performance over the pure Python module written by Petru Paler, Connelly Barnes et al. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [PYPI_SOURCE] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), -] - -maxparallel = 12 - -use_pip = True -sanity_pip_check = True -download_dep_fail = True - -options = {'modulename': 'rencode'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/r/rencode/rencode-1.0.5-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/r/rencode/rencode-1.0.5-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 9a3883116196a44240b3bdc026a9e678e209c82e..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/rencode/rencode-1.0.5-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'rencode' -version = '1.0.5' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://pypi.python.org/pypi/rencode/' -description = """ -The rencode module is similar to bencode from the BitTorrent project. -For complex, heterogeneous data structures with many small elements, -r-encodings take up significantly less space than b-encodings. -This version of rencode is a complete rewrite in Cython to attempt to -increase the performance over the pure Python module written by Petru Paler, Connelly Barnes et al. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = [PYPI_SOURCE] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), -] - -maxparallel = 12 - -use_pip = True -sanity_pip_check = True -download_dep_fail = True - -options = {'modulename': 'rencode'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages/'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/r/rkcommon/rkcommon-1.6.1-GCC-10.3.0.eb b/Golden_Repo/r/rkcommon/rkcommon-1.6.1-GCC-10.3.0.eb deleted file mode 100644 index bfee3373087fa1143fc2531bbad02bf4209d332c..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/rkcommon/rkcommon-1.6.1-GCC-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'rkcommon' -version = '1.6.1' - -homepage = 'http://www.ospray.org/' -description = """ -OSPRay is an open source, scalable, and portable ray tracing engine for -high-performance, high-fidelity visualization on Intel® Architecture CPUs. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/ospray/rkcommon/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['b61c10f26fba3e6f00305d5828b3bac523d559c5c0e6f79893b19e8c0e30074e'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('tbb', '2020.3'), -] - -separate_build_dir = True - -start_dir = '%(name)s-%(version)s' - -sanity_check_paths = { - 'dirs': ['bin', 'include/rkcommon'], - 'files': ['lib/librkcommon.so'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/r/rpmrebuild/rpmrebuild-2.15.eb b/Golden_Repo/r/rpmrebuild/rpmrebuild-2.15.eb deleted file mode 100644 index 938c66f06f51237f7dc32b1055f5742b6cff159c..0000000000000000000000000000000000000000 --- a/Golden_Repo/r/rpmrebuild/rpmrebuild-2.15.eb +++ /dev/null @@ -1,33 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2016 Forschungszentrum Juelich GmbH -# Authors:: Damian Alvarez <d.alvarez@fz-juelich.de> -# License:: MIT/GPL -# $Id$ -## - -easyblock = "Tarball" - -name = "rpmrebuild" -version = "2.15" - -homepage = 'http://rpmrebuild.sourceforge.net/' -description = """rpmrebuild is a tool to build an RPM file from a package that has already been installed""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -modextrapaths = {'PATH': ['']} -modextravars = {'RPMREBUILD_ROOT_DIR': '%(installdir)s'} - -sanity_check_paths = { - 'files': ["rpmrebuild"], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gompi-2020.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gompi-2020.eb deleted file mode 100644 index 38f789ffbd6c175664127e20ca7a9702c304ba4c..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gompi-2020.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gompi-2021.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gompi-2021.eb deleted file mode 100644 index f4e023ade874e29081b9b83ddde41b3dd195cf24..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gompi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -dependencies = [ - ('zlib', '1.2.11'), -] - -postinstallcmds = [ - 'mkdir %(installdir)s/example/', - 'cp %(builddir)s/scotch_%(version)s/doc/scotch_example.f %(installdir)s/example/', -] - -modextravars = { - 'SCOTCH_ROOT': '%(installdir)s', - 'SCOTCH_INCLUDE': '%(installdir)s/include/', - 'SCOTCH_LIB': '%(installdir)s/lib', -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gpsmpi-2020.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gpsmpi-2020.eb deleted file mode 100644 index cb8e12d77bca6f3f931d6aa42732f1b1fcb24766..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gpsmpi-2020.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gpsmpi-2021.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gpsmpi-2021.eb deleted file mode 100644 index 448f8a91d8adbf1178f32e0fad6bd428b7438c60..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-gpsmpi-2021.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -postinstallcmds = [ - 'mkdir %(installdir)s/example/', - 'cp %(builddir)s/scotch_%(version)s/doc/scotch_example.f %(installdir)s/example/', -] - -modextravars = { - 'SCOTCH_ROOT': '%(installdir)s', - 'SCOTCH_INCLUDE': '%(installdir)s/include/', - 'SCOTCH_LIB': '%(installdir)s/lib', -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iimpi-2020.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iimpi-2020.eb deleted file mode 100644 index 6bcee48022bc62b32ce58626a30077b33684d57b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iimpi-2020.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iimpi-2021.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iimpi-2021.eb deleted file mode 100644 index 36c73a1001acb10fa3e0158caa07c2bbe0bc3bbd..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iimpi-2021.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -postinstallcmds = [ - 'mkdir %(installdir)s/example/', - 'cp %(builddir)s/scotch_%(version)s/doc/scotch_example.f %(installdir)s/example/', -] - -modextravars = { - 'SCOTCH_ROOT': '%(installdir)s', - 'SCOTCH_INCLUDE': '%(installdir)s/include/', - 'SCOTCH_LIB': '%(installdir)s/lib', -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iompi-2020.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iompi-2020.eb deleted file mode 100644 index 183e032d3b65b36ab2dd9b032c532e518d717cd9..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iompi-2020.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iompi-2021.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iompi-2021.eb deleted file mode 100644 index 02110229f199e549ecad96228b2e1a3c493d7ebf..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-iompi-2021.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -postinstallcmds = [ - 'mkdir %(installdir)s/example/', - 'cp %(builddir)s/scotch_%(version)s/doc/scotch_example.f %(installdir)s/example/', -] - -modextravars = { - 'SCOTCH_ROOT': '%(installdir)s', - 'SCOTCH_INCLUDE': '%(installdir)s/include/', - 'SCOTCH_LIB': '%(installdir)s/lib', -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-ipsmpi-2020.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-ipsmpi-2020.eb deleted file mode 100644 index 817b2b99642d49864810730ce942e33b3eaa2083..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-ipsmpi-2020.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-ipsmpi-2021.eb b/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-ipsmpi-2021.eb deleted file mode 100644 index 4ddc26730a6a309372dfbf7926047a3d5ca78ddc..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCOTCH/SCOTCH-6.1.0-ipsmpi-2021.eb +++ /dev/null @@ -1,30 +0,0 @@ -name = 'SCOTCH' -version = '6.1.0' - -homepage = 'http://gforge.inria.fr/projects/scotch/' -description = """Software package and libraries for sequential and parallel graph partitioning, -static mapping, and sparse matrix block ordering, and sequential mesh and hypergraph partitioning. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -source_urls = ['http://gforge.inria.fr/frs/download.php/file/38352/'] -sources = ['%(namelower)s_%(version)s.tar.gz'] - -threadedmpi = False - -postinstallcmds = [ - 'mkdir %(installdir)s/example/', - 'cp %(builddir)s/scotch_%(version)s/doc/scotch_example.f %(installdir)s/example/', -] - -modextravars = { - 'SCOTCH_ROOT': '%(installdir)s', - 'SCOTCH_INCLUDE': '%(installdir)s/include/', - 'SCOTCH_LIB': '%(installdir)s/lib', -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/SCons/SCons-3.1.2-GCCcore-10.3.0.eb b/Golden_Repo/s/SCons/SCons-3.1.2-GCCcore-10.3.0.eb deleted file mode 100644 index b8b0e98f2353bb8e3f9ad097dda0dd821414b9e9..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCons/SCons-3.1.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'SCons' -version = '3.1.2' - -homepage = 'http://www.scons.org/' -description = """SCons is a software construction tool.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://prdownloads.sourceforge.net/%(namelower)s'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -sanity_check_paths = { - 'files': ['bin/scons', 'bin/scons-time', 'bin/sconsign'], - 'dirs': ['lib/%(namelower)s/%(name)s'], -} - -options = {'modulename': False} - -moduleclass = 'devel' diff --git a/Golden_Repo/s/SCons/SCons-3.1.2-GCCcore-9.3.0.eb b/Golden_Repo/s/SCons/SCons-3.1.2-GCCcore-9.3.0.eb deleted file mode 100644 index d42b125c6192266b7cd20e826e63b8ad3f1f28c9..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SCons/SCons-3.1.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'SCons' -version = '3.1.2' - -homepage = 'http://www.scons.org/' -description = """SCons is a software construction tool.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://prdownloads.sourceforge.net/%(namelower)s'] - -builddependencies = [ - ('binutils', '2.34'), -] - -sanity_check_paths = { - 'files': ['bin/scons', 'bin/scons-time', 'bin/sconsign'], - 'dirs': ['lib/%(namelower)s/%(name)s'], -} - -options = {'modulename': False} - -moduleclass = 'devel' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-gompi-2020.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-gompi-2020.eb deleted file mode 100644 index fc975a91d2c3ad3770d92d318f080cf961bb5cd6..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-gompi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.0" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -dependencies = [('Boost', '1.74.0')] - -builddependencies = [('CMake', '3.18.0')] - -source_urls = ['https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd/-/archive/v1.0.0/SIONfwd-v%(version)s.tar.gz'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-gpsmpi-2020.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-gpsmpi-2020.eb deleted file 
mode 100644 index a457fe20ffee2f7cd10cde8804df3c415fba67e6..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-gpsmpi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.0" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -dependencies = [('Boost', '1.74.0')] - -builddependencies = [('CMake', '3.18.0')] - -source_urls = ['https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd/-/archive/v1.0.0/SIONfwd-v%(version)s.tar.gz'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-iimpi-2020.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-iimpi-2020.eb deleted file mode 100644 index 1c4d7f5f4c069bbe5335c5dcd3c14e51a1f30b0d..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-iimpi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.0" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'pic': True} - -dependencies = [('Boost', '1.73.0')] - -builddependencies = [('CMake', '3.18.0')] - -source_urls = ['https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd/-/archive/v1.0.0/SIONfwd-v%(version)s.tar.gz'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-iompi-2020.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-iompi-2020.eb deleted file mode 100644 index 6b2ebcd4c9b61bec2e2bdee56dd43b8153b5226b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-iompi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.0" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'pic': True} - -dependencies = [('Boost', '1.73.0')] - -builddependencies = [('CMake', '3.18.0')] - -source_urls = ['https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd/-/archive/v1.0.0/SIONfwd-v%(version)s.tar.gz'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-ipsmpi-2020.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-ipsmpi-2020.eb deleted file mode 100644 index b1ad7c1ae4478f251e6746c7117621d97fb87e22..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.0-ipsmpi-2020.eb +++ /dev/null @@ -1,25 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.0" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -dependencies = [('Boost', '1.73.0')] - 
-builddependencies = [('CMake', '3.18.0')] - -source_urls = ['https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd/-/archive/v1.0.0/SIONfwd-v%(version)s.tar.gz'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-gompi-2021.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-gompi-2021.eb deleted file mode 100644 index 047d147b503d6ef5ace9abdb37d433f58afa980a..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-gompi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.1" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [('CMake', '3.18.0', '', SYSTEM)] - -source_urls = ['https://gitlab.jsc.fz-juelich.de/SIONlib/SIONfwd/-/archive/v%(version)s/'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-gpsmpi-2021.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-gpsmpi-2021.eb deleted file mode 100644 index 66e0807bfda6c61cbef1ba0226688463e8f0add6..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-gpsmpi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.1" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [('CMake', '3.18.0', '', SYSTEM)] - -source_urls = ['https://gitlab.jsc.fz-juelich.de/SIONlib/SIONfwd/-/archive/v%(version)s/'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-iimpi-2021.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-iimpi-2021.eb deleted file mode 100644 index b040d6141e594cb3286a5cf6845cb4f1cd621d3e..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-iimpi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.1" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [('CMake', '3.18.0', '', SYSTEM)] - -source_urls = ['https://gitlab.jsc.fz-juelich.de/SIONlib/SIONfwd/-/archive/v%(version)s/'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-iompi-2021.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-iompi-2021.eb deleted file mode 100644 index 85ed7700a51e2a07c52006c22e1ad83801ac6152..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-iompi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.1" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O 
forwarding for SIONlib' - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [('CMake', '3.18.0', '', SYSTEM)] - -source_urls = ['https://gitlab.jsc.fz-juelich.de/SIONlib/SIONfwd/-/archive/v%(version)s/'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-ipsmpi-2021.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-ipsmpi-2021.eb deleted file mode 100644 index 982b8c6716abbf356ca3a0d295b4b471ae1db4cd..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-ipsmpi-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.1" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [('CMake', '3.18.0', '', SYSTEM)] - -source_urls = ['https://gitlab.jsc.fz-juelich.de/SIONlib/SIONfwd/-/archive/v%(version)s/'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-npsmpic-2021.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-npsmpic-2021.eb deleted file mode 100644 index 2feff3c78c3caca6900f04d25c3a44f495927ea7..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-npsmpic-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.1" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'npsmpic', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [('CMake', '3.18.0', '', SYSTEM)] - -source_urls = ['https://gitlab.jsc.fz-juelich.de/SIONlib/SIONfwd/-/archive/v%(version)s/'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-nvompic-2021.eb b/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-nvompic-2021.eb deleted file mode 100644 index 17414596b1048cfd6d4b1a6493dc76b97cd6ce71..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONfwd/SIONfwd-1.0.1-nvompic-2021.eb +++ /dev/null @@ -1,23 +0,0 @@ -easyblock = "CMakeMake" -name = "SIONfwd" -version = "1.0.1" - -homepage = 'https://gitlab.version.fz-juelich.de/SIONlib/SIONfwd' -description = 'I/O forwarding for SIONlib' - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'nvompic', 'version': '2021'} -toolchainopts = {'pic': True} - -builddependencies = [('CMake', '3.18.0', '', SYSTEM)] - -source_urls = ['https://gitlab.jsc.fz-juelich.de/SIONlib/SIONfwd/-/archive/v%(version)s/'] -sources = ['SIONfwd-v%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': ["bin/sionfwd-server"], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-GCCcore-9.3.0-tools.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-GCCcore-9.3.0-tools.eb deleted file mode 100644 index 6694ddc843d8b0e66ad8dc11941cff2b3f8d075c..0000000000000000000000000000000000000000 --- 
a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-GCCcore-9.3.0-tools.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" -versionsuffix = '-tools' - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for parallel access to task-local files. -The library not only supports writing and reading binary data to or from several thousands of -processors into a single or a small number of physical files, but also provides global open -and close functions to access SIONlib files in parallel. This package provides a stripped-down -installation of SIONlib for use with performance tools (e.g., Score-P), with renamed symbols -to avoid conflicts when an application using SIONlib itself is linked against a tool requiring -a different SIONlib version. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = ['sionlib-%(version)sl.tar.gz'] -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)sl'] - -builddependencies = [ - ('binutils', '2.34') -] - -hidden = True - -configopts = '--disable-mic --disable-cxx --disable-fortran --disable-ompi' - -sanity_check_paths = { - 'files': ['bin/sionconfig'] + - ['lib/lib%s_64.a' % x for x in ['lsioncom', 'lsiongen', 'lsionser']], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-gompi-2020.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-gompi-2020.eb deleted file mode 100644 index 4dfc8579f7c507e680d43d6aa5256bdd914108f0..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-gompi-2020.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2020'} - -dependencies = [('SIONfwd', '1.0.0')] - -builddependencies = [('pkg-config', '0.29.2')] - -configopts = '--disable-mic --compiler=gnu --mpi=openmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-gpsmpi-2020.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-gpsmpi-2020.eb deleted file mode 100644 index ab5a236c6bb341c6482a4ad49d589234b7871374..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-gpsmpi-2020.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. -""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -dependencies = [('SIONfwd', '1.0.0')] - -builddependencies = [('pkg-config', '0.29.2')] - -patches = ['sionlib_psmpi.patch'] - -configopts = '--disable-mic --compiler=gnu --mpi=psmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-iimpi-2020.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-iimpi-2020.eb deleted file mode 100644 index 10d1aed68c12b58dc5dd15699a8436a626e86409..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-iimpi-2020.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -dependencies = [('SIONfwd', '1.0.0')] - -builddependencies = [('pkg-config', '0.29.2')] - -configopts = '--disable-mic --compiler=intel --mpi=mpich3 --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-iompi-2020.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-iompi-2020.eb deleted file mode 100644 index b8d50430b1c6102d760ceabd81f2b6785c0139c1..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-iompi-2020.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. -""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2020'} - -dependencies = [('SIONfwd', '1.0.0')] - -builddependencies = [('pkg-config', '0.29.2')] - -configopts = '--disable-mic --compiler=intel --mpi=openmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-ipsmpi-2020.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-ipsmpi-2020.eb deleted file mode 100644 index 2bc93b6ad10a36da81289a18ce2c1dc8af4dca9f..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-ipsmpi-2020.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -dependencies = [('SIONfwd', '1.0.0')] - -builddependencies = [('pkg-config', '0.29.2')] - -patches = ['sionlib_psmpi.patch'] - -configopts = '--disable-mic --compiler=intel --mpi=psmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-npsmpic-2020.1.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-npsmpic-2020.1.eb deleted file mode 100644 index 5a2e60d8eee65f27908433ecc35d1b34ebf150c3..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-npsmpic-2020.1.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. -""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'npsmpic', 'version': '2020.1'} - -patches = ['sionlib_psmpi.patch'] - -configopts = '--disable-mic --compiler=pgi --mpi=psmpi --disable-cxx ' -configopts += 'CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-npsmpic-2020.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.6-npsmpic-2020.eb deleted file mode 100644 index 3aad3ca2fa2785bfe83015e05fe7f0875addd92b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.6-npsmpic-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.6" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Benedikt Steinbusch <b.steinbusch@fz-juelich.de>' - -toolchain = {'name': 'npsmpic', 'version': '2020'} - -patches = ['sionlib_psmpi.patch'] - -configopts = '--disable-mic --compiler=pgi --mpi=psmpi --disable-cxx ' -configopts += 'CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-GCCcore-10.3.0-tools.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-GCCcore-10.3.0-tools.eb deleted file mode 100644 index cbba631a5b2c0274e70949c5f3b28b508cb1f868..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-GCCcore-10.3.0-tools.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" -versionsuffix = '-tools' - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for parallel access to task-local files. -The library not only supports writing and reading binary data to or from several thousands of -processors into a single or a small number of physical files, but also provides global open -and close functions to access SIONlib files in parallel. This package provides a stripped-down -installation of SIONlib for use with performance tools (e.g., Score-P), with renamed symbols -to avoid conflicts when an application using SIONlib itself is linked against a tool requiring -a different SIONlib version. -""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = ['sionlib-%(version)sl.tar.gz'] -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)sl'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -hidden = True - -configopts = '--disable-mic --disable-cxx --disable-fortran --disable-ompi' - -sanity_check_paths = { - 'files': ['bin/sionconfig'] + - ['lib/lib%s_64.a' % x for x in ['lsioncom', 'lsiongen', 'lsionser']], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-gompi-2021.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-gompi-2021.eb deleted file mode 100644 index 43599161a46310a1fb04ec558b49d47ccd6d02e6..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-gompi-2021.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2021'} - -dependencies = [('SIONfwd', '1.0.1')] - -builddependencies = [('pkg-config', '0.29.2')] - -configopts = '--disable-mic --compiler=gnu --mpi=openmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-gpsmpi-2021.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-gpsmpi-2021.eb deleted file mode 100644 index 20fcc00c9a8c918521d16b9231f8f0ef95ab4dc3..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-gpsmpi-2021.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. -""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} - -dependencies = [('SIONfwd', '1.0.1')] - -builddependencies = [('pkg-config', '0.29.2')] - -patches = ['sionlib_psmpi.patch'] - -configopts = '--disable-mic --compiler=gnu --mpi=psmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-iimpi-2021.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-iimpi-2021.eb deleted file mode 100644 index ab291bef711a9d64a857833b21462b6bcbc2faa9..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-iimpi-2021.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2021'} - -dependencies = [('SIONfwd', '1.0.1')] - -builddependencies = [('pkg-config', '0.29.2')] - -configopts = '--disable-mic --compiler=intel --mpi=mpich3 --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-iompi-2021.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-iompi-2021.eb deleted file mode 100644 index 325d4ef91d768ba0fec3df890a6a6f0e3f68175e..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-iompi-2021.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. -""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2021'} - -dependencies = [('SIONfwd', '1.0.1')] - -builddependencies = [('pkg-config', '0.29.2')] - -configopts = '--disable-mic --compiler=intel --mpi=openmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-ipsmpi-2021.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-ipsmpi-2021.eb deleted file mode 100644 index 6317cc2c84132a3d1d366d25b98c9d5c89639308..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-ipsmpi-2021.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} - -dependencies = [('SIONfwd', '1.0.1')] - -builddependencies = [('pkg-config', '0.29.2')] - -patches = ['sionlib_psmpi.patch'] - -configopts = '--disable-mic --compiler=intel --mpi=psmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-npsmpic-2021.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-npsmpic-2021.eb deleted file mode 100644 index 6106371152e12162e37484271a2221af68b7a514..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-npsmpic-2021.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. -""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'npsmpic', 'version': '2021'} - -dependencies = [('SIONfwd', '1.0.1')] - -builddependencies = [('pkg-config', '0.29.2')] - -patches = ['sionlib_psmpi.patch'] - -configopts = '--disable-mic --compiler=pgi --mpi=psmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-nvompic-2021.eb b/Golden_Repo/s/SIONlib/SIONlib-1.7.7-nvompic-2021.eb deleted file mode 100644 index ac059532df1d0213cc985afe1ca6ea2b950e6442..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/SIONlib-1.7.7-nvompic-2021.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = "ConfigureMake" -name = "SIONlib" -version = "1.7.7" - -homepage = 'http://www.fz-juelich.de/ias/jsc/EN/Expertise/Support/Software/SIONlib/_node.html' -description = """SIONlib is a scalable I/O library for the parallel access to -task-local files. The library not only supports writing and reading -binary data to or from from several thousands of processors into a -single or a small number of physical files but also provides for -global open and close functions to access SIONlib file in -parallel. SIONlib provides different interfaces: parallel access using -MPI, OpenMP, or their combination and sequential access for -post-processing utilities. 
-""" - -site_contacts = 'Tom Ridley <t.ridley@fz-juelich.de>' - -toolchain = {'name': 'nvompic', 'version': '2021'} - -dependencies = [('SIONfwd', '1.0.1')] - -builddependencies = [('pkg-config', '0.29.2')] - -configopts = '--disable-mic --compiler=pgi --mpi=openmpi --disable-cxx ' -configopts += '--enable-sionfwd="$EBROOTSIONFWD" CFLAGS="$CFLAGS -fPIC" ' - -source_urls = ['http://apps.fz-juelich.de/jsc/sionlib/download.php?version=%(version)s'] -sources = ['sionlib-%(version)s.tar.gz'] - -sanity_check_paths = { - 'files': [ - "bin/sionconfig", - ("lib64/libsioncom_64.a", "lib/libsionmpi_64.a", "lib64/libsionmpi_64.a"), - ], - 'dirs': [] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/SIONlib/sionlib_psmpi.patch b/Golden_Repo/s/SIONlib/sionlib_psmpi.patch deleted file mode 100644 index 75df0efb09ce8c976b74479f0c0e0880fecd4d83..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SIONlib/sionlib_psmpi.patch +++ /dev/null @@ -1,19 +0,0 @@ -diff -rupN sionlib.orig/mf/mpi-psmpi.def sionlib/mf/mpi-psmpi.def ---- sionlib.orig/mf/mpi-psmpi.def 1970-01-01 01:00:00.000000000 +0100 -+++ sionlib/mf/mpi-psmpi.def 2016-04-29 15:09:02.535739000 +0200 -@@ -0,0 +1,7 @@ -+MPIENABLE = 1 -+MPICC = MPICH_CC="$(CC)" mpicc -+MPICXX = MPICH_CXX="$(CXX)" mpicxx -+MPIF77 = MPICH_FC="$(F77)" mpif77 -+MPIF90 = MPICH_FC="$(F90)" mpif90 -+MPILIB = -+MPITESTRUN = mpiexec -np 4 --envall -diff -rupN sionlib.orig/README sionlib/README ---- sionlib.orig/README 2016-04-29 15:26:26.538865000 +0200 -+++ sionlib/README 2016-04-29 15:26:52.652997000 +0200 -@@ -144,3 +144,4 @@ License: - -------- - - See the file COPYRIGHT in the package base directory for details -+ diff --git a/Golden_Repo/s/SLEPc/SLEPc-3.14.0-gpsmkl-2020.eb b/Golden_Repo/s/SLEPc/SLEPc-3.14.0-gpsmkl-2020.eb deleted file mode 100644 index d4d95ef7bfa000a7fe9446cd1ee4a8a464d15b73..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SLEPc/SLEPc-3.14.0-gpsmkl-2020.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'SLEPc' -version = '3.14.0' - -homepage = 'http://slepc.upv.es/' -description = """SLEPc (Scalable Library for Eigenvalue Problem Computations) is a software library for the solution - of large scale sparse eigenvalue problems on parallel computers. It is an extension of PETSc and can be used for - either standard or generalized eigenproblems, with real or complex arithmetic. 
It can also be used for computing a - partial SVD of a large, sparse, rectangular matrix, and to solve quadratic eigenvalue problems.""" - -examples = 'Examples can be found in $EBROOTSLEPC/share/slepc/examples/' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = ['http://slepc.upv.es/download/distrib'] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [('PETSc', '3.14')] - -petsc_arch = 'installed-arch-linux2-c-opt' - -modextravars = { - 'SLEPc_ROOT': '%(installdir)s', - 'SLEPcROOT': '%(installdir)s', - 'SLEPc_INCLUDE': '%(installdir)s/include/', - 'SLEPc_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SLEPc/SLEPc-3.14.0-intel-2020.eb b/Golden_Repo/s/SLEPc/SLEPc-3.14.0-intel-2020.eb deleted file mode 100644 index b94d0c34e59314abb237b6e60a6b3b7885df35cd..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SLEPc/SLEPc-3.14.0-intel-2020.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'SLEPc' -version = '3.14.0' - -homepage = 'http://slepc.upv.es/' -description = """SLEPc (Scalable Library for Eigenvalue Problem Computations) is a software library for the solution - of large scale sparse eigenvalue problems on parallel computers. It is an extension of PETSc and can be used for - either standard or generalized eigenproblems, with real or complex arithmetic. It can also be used for computing a - partial SVD of a large, sparse, rectangular matrix, and to solve quadratic eigenvalue problems.""" - -examples = 'Examples can be found in $EBROOTSLEPC/share/slepc/examples/' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = ['http://slepc.upv.es/download/distrib'] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [('PETSc', '3.14')] - -petsc_arch = 'installed-arch-linux2-c-opt' - -modextravars = { - 'SLEPc_ROOT': '%(installdir)s', - 'SLEPcROOT': '%(installdir)s', - 'SLEPc_INCLUDE': '%(installdir)s/include/', - 'SLEPc_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SLEPc/SLEPc-3.14.0-intel-para-2020.eb b/Golden_Repo/s/SLEPc/SLEPc-3.14.0-intel-para-2020.eb deleted file mode 100644 index fdc243ceefcf7b781a625048b8c8fd9b0bd85f95..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SLEPc/SLEPc-3.14.0-intel-para-2020.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'SLEPc' -version = '3.14.0' - -homepage = 'http://slepc.upv.es/' -description = """SLEPc (Scalable Library for Eigenvalue Problem Computations) is a software library for the solution - of large scale sparse eigenvalue problems on parallel computers. It is an extension of PETSc and can be used for - either standard or generalized eigenproblems, with real or complex arithmetic. 
It can also be used for computing a - partial SVD of a large, sparse, rectangular matrix, and to solve quadratic eigenvalue problems.""" - -examples = 'Examples can be found in $EBROOTSLEPC/share/slepc/examples/' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True, 'openmp': True} - -source_urls = ['http://slepc.upv.es/download/distrib'] -sources = [SOURCELOWER_TAR_GZ] - -dependencies = [('PETSc', '3.14')] - -petsc_arch = 'installed-arch-linux2-c-opt' - -modextravars = { - 'SLEPc_ROOT': '%(installdir)s', - 'SLEPcROOT': '%(installdir)s', - 'SLEPc_INCLUDE': '%(installdir)s/include/', - 'SLEPc_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SQLite/SQLite-3.32.3-GCCcore-10.3.0.eb b/Golden_Repo/s/SQLite/SQLite-3.32.3-GCCcore-10.3.0.eb deleted file mode 100644 index 6de8b81207e93ba108ba71a6c683cb3ffc962981..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SQLite/SQLite-3.32.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'SQLite' -version = '3.32.3' - -homepage = 'http://www.sqlite.org/' -description = 'SQLite: SQL Database Engine in a C Library' - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -# eg. http://www.sqlite.org/2014/sqlite-autoconf-3080600.tar.gz -source_urls = ['http://www.sqlite.org/2020/'] -local_version_str = '%%(version_major)s%s00' % ''.join('%02d' % int(x) for x in version.split('.')[1:]) -sources = ['sqlite-autoconf-%s.tar.gz' % local_version_str] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('libreadline', '8.0'), - ('Tcl', '8.6.10'), -] - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/sqlite3', 'include/sqlite3ext.h', 'include/sqlite3.h', - 'lib/libsqlite3.a', 'lib/libsqlite3.%s' % SHLIB_EXT], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/s/SQLite/SQLite-3.32.3-GCCcore-9.3.0.eb b/Golden_Repo/s/SQLite/SQLite-3.32.3-GCCcore-9.3.0.eb deleted file mode 100644 index bc094e1cc9a318d3e96d295d5807854d4463af7b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SQLite/SQLite-3.32.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'SQLite' -version = '3.32.3' - -homepage = 'http://www.sqlite.org/' -description = 'SQLite: SQL Database Engine in a C Library' - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# eg. 
http://www.sqlite.org/2014/sqlite-autoconf-3080600.tar.gz -source_urls = ['http://www.sqlite.org/2020/'] -local_version_str = '%%(version_major)s%s00' % ''.join('%02d' % int(x) for x in version.split('.')[1:]) -sources = ['sqlite-autoconf-%s.tar.gz' % local_version_str] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('libreadline', '8.0'), - ('Tcl', '8.6.10'), -] - -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/sqlite3', 'include/sqlite3ext.h', 'include/sqlite3.h', - 'lib/libsqlite3.a', 'lib/libsqlite3.%s' % SHLIB_EXT], - 'dirs': ['lib/pkgconfig'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/s/SWIG/SWIG-3.0.12-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/s/SWIG/SWIG-3.0.12-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index c11aebc41a13e2fbe15d07438485b2be19326932..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SWIG/SWIG-3.0.12-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'SWIG' -version = '3.0.12' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.swig.org/' -description = """SWIG is a software development tool that connects programs written in C and C++ with - a variety of high-level programming languages.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True, 'opt': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PCRE', '8.44'), -] - -moduleclass = 'devel' diff --git a/Golden_Repo/s/SWIG/SWIG-3.0.12-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/s/SWIG/SWIG-3.0.12-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 0fc639c1a83be23fd8b36739f2b7e0c65f2ed0a2..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SWIG/SWIG-3.0.12-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'SWIG' -version = '3.0.12' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.swig.org/' -description = """SWIG is a software development tool that connects programs written in C and C++ with - a variety of high-level programming languages.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True, 'opt': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PCRE', '8.44'), -] - -moduleclass = 'devel' diff --git a/Golden_Repo/s/SWIG/SWIG-4.0.2-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/s/SWIG/SWIG-4.0.2-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 8ac6c3fc32c485669b48010573057fd473d03109..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SWIG/SWIG-4.0.2-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'SWIG' -version = '4.0.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.swig.org/' -description = """SWIG is a software development tool that connects programs - written in C and C++ with a variety of high-level programming languages.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True, 'opt': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PCRE', '8.44'), -] - -moduleclass = 'devel' diff --git 
a/Golden_Repo/s/SWIG/SWIG-4.0.2-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/s/SWIG/SWIG-4.0.2-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 64b1d0d73428ced4d88ff1a46b7ce1d0e2b27db1..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SWIG/SWIG-4.0.2-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,26 +0,0 @@ -name = 'SWIG' -version = '4.0.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.swig.org/' -description = """SWIG is a software development tool that connects programs - written in C and C++ with a variety of high-level programming languages.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True, 'opt': True} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('PCRE', '8.44'), -] - -moduleclass = 'devel' diff --git a/Golden_Repo/s/ScaFaCoS/ScaFaCoS-1.0.1-gpsmpi-2020.eb b/Golden_Repo/s/ScaFaCoS/ScaFaCoS-1.0.1-gpsmpi-2020.eb deleted file mode 100644 index 967671a3c0e377b05daff1746d106de4d7052d44..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/ScaFaCoS/ScaFaCoS-1.0.1-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ScaFaCoS' -version = '1.0.1' - -homepage = 'http://www.scafacos.de/' -description = "ScaFaCoS is a library of scalable fast coulomb solvers." - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/%(namelower)s/%(namelower)s/releases/download/v%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['2b125f313795c81b0e87eb920082e91addf94c17444f9486d979e691aaded99b'] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] -dependencies = [ - ('GMP', '6.2.0'), - ('GSL', '2.6'), - ('FFTW', '3.3.8'), -] - -preconfigopts = "unset F77 && " -configopts = "--enable-shared --enable-static --disable-doc --without-internal-fftw " -configopts += "--with-fftw3-includedir=$EBROOTFFTW/include --with-fftw3-libdir=$EBROOTFFTW/lib " -configopts += "--enable-fcs-solvers=direct,ewald,fmm,p3m " - -sanity_check_paths = { - 'files': ['lib/libfcs.a', 'include/fcs.h', 'include/fcs_module.mod'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/ScaFaCoS/ScaFaCoS-1.0.1-ipsmpi-2020.eb b/Golden_Repo/s/ScaFaCoS/ScaFaCoS-1.0.1-ipsmpi-2020.eb deleted file mode 100644 index ce5a5c4399be2e614ec1ea0ebd8f69452b8e6993..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/ScaFaCoS/ScaFaCoS-1.0.1-ipsmpi-2020.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ScaFaCoS' -version = '1.0.1' - -homepage = 'http://www.scafacos.de/' -description = "ScaFaCoS is a library of scalable fast coulomb solvers." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/%(namelower)s/%(namelower)s/releases/download/v%(version)s'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['2b125f313795c81b0e87eb920082e91addf94c17444f9486d979e691aaded99b'] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] -dependencies = [ - ('GMP', '6.2.0'), - ('GSL', '2.6'), - ('FFTW', '3.3.8'), -] - -preconfigopts = "unset F77 && " -configopts = "--enable-shared --enable-static --disable-doc --without-internal-fftw " -configopts += "--with-fftw3-includedir=$EBROOTFFTW/include --with-fftw3-libdir=$EBROOTFFTW/lib " -configopts += "--enable-fcs-solvers=direct,ewald,fmm,p3m " - -sanity_check_paths = { - 'files': ['lib/libfcs.a', 'include/fcs.h', 'include/fcs_module.mod'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.5-gompi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.5-gompi-2020.eb deleted file mode 100644 index d1764267159cf2574b65de97a0b8230df75738f4..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.5-gompi-2020.eb +++ /dev/null @@ -1,52 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeGUI', '4.5'), - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.5-gpsmpi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.5-gpsmpi-2020.eb deleted file mode 100644 index b9e1de982d797316168d673c47e2e46877313238..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.5-gpsmpi-2020.eb +++ /dev/null @@ -1,52 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeGUI', '4.5'), - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.5-iimpi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.5-iimpi-2020.eb deleted file mode 100644 index c9747be226977e515b6285416dad80c496b162db..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.5-iimpi-2020.eb +++ /dev/null @@ -1,52 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. 
The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeGUI', '4.5'), - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.5-ipsmpi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.5-ipsmpi-2020.eb deleted file mode 100644 index 05876cd43639f58427bc0722ece044a98a04a92b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.5-ipsmpi-2020.eb +++ /dev/null @@ -1,52 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeGUI', '4.5'), - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.5-npsmpic-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.5-npsmpic-2020.eb deleted file mode 100644 index bc9e0675918ae5b7798e88aebb728d7963443143..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.5-npsmpic-2020.eb +++ /dev/null @@ -1,52 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeGUI', '4.5'), - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.6-gompi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.6-gompi-2020.eb deleted file mode 100644 index 1070167d151f682f79c14efd6ed51c766a649384..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.6-gompi-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. 
The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2020'} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeGUI', '4.6'), - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.6-gpsmpi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.6-gpsmpi-2020.eb deleted file mode 100644 index cb5f53f54ca7c43365ee82bafdbb5e00051a6c7d..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.6-gpsmpi-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeGUI', '4.6'), - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.6-iimpi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.6-iimpi-2020.eb deleted file mode 100644 index e628e11ad638988b74643084f13e550e5d8076e2..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.6-iimpi-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeGUI', '4.6'), - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.6-iompi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.6-iompi-2020.eb deleted file mode 100644 index f8bb13707834ead114380e17577fd1a11cfc44cb..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.6-iompi-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2020'} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeGUI', '4.6'), - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.6-ipsmpi-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.6-ipsmpi-2020.eb deleted file mode 100644 index 13974fe405faefb4b107fd0219f3c20c9cf59f43..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.6-ipsmpi-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeGUI', '4.6'), - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Scalasca/Scalasca-2.6-npsmpic-2020.eb b/Golden_Repo/s/Scalasca/Scalasca-2.6-npsmpic-2020.eb deleted file mode 100644 index 9c68493571991976adc8c9382eb39a8e01fe97ab..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Scalasca/Scalasca-2.6-npsmpic-2020.eb +++ /dev/null @@ -1,55 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'npsmpic', 'version': '2020'} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeGUI', '4.6'), - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/SciPy-Stack/SciPy-Stack-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/s/SciPy-Stack/SciPy-Stack-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index ed8e03118e039aaaf6bc5376d784f6d62f0b0ea0..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SciPy-Stack/SciPy-Stack-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,92 +0,0 @@ -easyblock = 'Bundle' -name = 'SciPy-Stack' -version = '2020' -versionsuffix = '-Python-%(pyver)s' - -local_numpyversion = '1.19.1' - -homepage = 'http://www.scipy.org' -description = """SciPy Stack is a collection of open source software for scientific computing in Python.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('unzip', '6.0'), -] - -########################################################################### -# WARNING: "six" is picked up from GC3Pie if it is loaded, causing an error -# To proceed, unload GC3Pie -########################################################################### - -dependencies = [ - ('Python', '3.8.5'), - ('freetype', '2.10.1'), - ('libpng', '1.6.37'), - ('libreadline', '8.0'), - ('SQLite', '3.32.3'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('Cycler', '0.10.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/C/Cycler']), - ('source_tmpl', 'cycler-%(version)s.tar.gz'), - ('modulename', 'cycler'), - ])), - ('mpmath', '1.1.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/m/mpmath']), - ])), - ('numpy', local_numpyversion, dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://github.com/numpy/numpy/releases/download/v%s/' % local_numpyversion]), - ('patches', [ - 'numpy-1.14.2-mkl.patch', - 'numpy-1.14.2-xhost.patch', - ]), - ])), - ('scipy', '1.5.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/s/scipy']), - ])), - ('sympy', '1.6.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/s/sympy']), - ])), - ('pandas', '1.1.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/pandas']), - ])), - ('kiwisolver', '1.2.0', dict(list(local_common_opts.items()) + [ - ('source_urls', 
['https://pypi.python.org/packages/source/k/kiwisolver']), - ])), - ('matplotlib', '3.3.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/m/matplotlib']), - ])), - ('xarray', '0.16.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/x/xarray']), - ])), - ('seaborn', '0.10.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/s/seaborn']), - ])), - ('deap', '1.3.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/d/deap']), - ])), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'vis' diff --git a/Golden_Repo/s/SciPy-Stack/SciPy-Stack-2021-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/s/SciPy-Stack/SciPy-Stack-2021-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index 4ae38aca7a9d830f736edbf3a16de781863cadcf..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SciPy-Stack/SciPy-Stack-2021-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,92 +0,0 @@ -easyblock = 'Bundle' -name = 'SciPy-Stack' -version = '2021' -versionsuffix = '-Python-%(pyver)s' - -local_numpyversion = '1.19.1' - -homepage = 'http://www.scipy.org' -description = """SciPy Stack is a collection of open source software for scientific computing in Python.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('unzip', '6.0'), -] - -########################################################################### -# WARNING: "six" is picked up from GC3Pie if it is loaded, causing an error -# To proceed, unload GC3Pie -########################################################################### - -dependencies = [ - ('Python', '3.8.5'), - ('freetype', '2.10.1'), - ('libpng', '1.6.37'), - ('libreadline', '8.0'), - ('SQLite', '3.32.3'), -] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('Cycler', '0.10.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/C/Cycler']), - ('source_tmpl', 'cycler-%(version)s.tar.gz'), - ('modulename', 'cycler'), - ])), - ('mpmath', '1.1.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/m/mpmath']), - ])), - ('numpy', local_numpyversion, dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://github.com/numpy/numpy/releases/download/v%s/' % local_numpyversion]), - ('patches', [ - 'numpy-1.14.2-mkl.patch', - 'numpy-1.14.2-xhost.patch', - ]), - ])), - ('scipy', '1.5.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/s/scipy']), - ])), - ('sympy', '1.6.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/s/sympy']), - ])), - ('pandas', '1.1.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/pandas']), - ])), - ('kiwisolver', '1.2.0', dict(list(local_common_opts.items()) + [ - ('source_urls', 
['https://pypi.python.org/packages/source/k/kiwisolver']), - ])), - ('matplotlib', '3.3.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/m/matplotlib']), - ])), - ('xarray', '0.16.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/x/xarray']), - ])), - ('seaborn', '0.10.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/s/seaborn']), - ])), - ('deap', '1.3.1', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/d/deap']), - ])), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'vis' diff --git a/Golden_Repo/s/SciPy-Stack/numpy-1.14.2-mkl.patch b/Golden_Repo/s/SciPy-Stack/numpy-1.14.2-mkl.patch deleted file mode 100644 index 4984299a06b64023175ef8fb4ccd7abcb5303ba6..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SciPy-Stack/numpy-1.14.2-mkl.patch +++ /dev/null @@ -1,53 +0,0 @@ -diff -ruN numpy-1.14.2.orig/numpy/distutils/fcompiler/__init__.py numpy-1.14.2/numpy/distutils/fcompiler/__init__.py ---- numpy-1.14.2.orig/numpy/distutils/fcompiler/__init__.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/fcompiler/__init__.py 2018-04-19 15:33:19.288258230 +0200 -@@ -625,7 +625,10 @@ - return options - - def library_option(self, lib): -- return "-l" + lib -+ if lib[0]=='-': -+ return lib -+ else: -+ return "-l" + lib - def library_dir_option(self, dir): - return "-L" + dir - -diff -ruN numpy-1.14.2.orig/numpy/distutils/system_info.py numpy-1.14.2/numpy/distutils/system_info.py ---- numpy-1.14.2.orig/numpy/distutils/system_info.py 2018-03-12 16:49:53.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/system_info.py 2018-04-19 15:35:50.127908579 +0200 -@@ -727,7 +727,7 @@ - if is_string(default): - return [default] - return default -- return [b for b in [a.strip() for a in libs.split(',')] if b] -+ return [b for b in [a.strip().replace(':',',') for a in libs.split(',')] if b] - - def get_libraries(self, key='libraries'): - if hasattr(self, '_lib_names'): -@@ -812,6 +812,9 @@ - # make sure we preserve the order of libs, as it can be important - found_dirs, found_libs = [], [] - for lib in libs: -+ if lib[0] == '-': -+ found_libs.append(lib) -+ continue - for lib_dir in lib_dirs: - found_lib = self._find_lib(lib_dir, lib, exts) - if found_lib: -diff -ruN numpy-1.14.2.orig/numpy/distutils/unixccompiler.py numpy-1.14.2/numpy/distutils/unixccompiler.py ---- numpy-1.14.2.orig/numpy/distutils/unixccompiler.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/unixccompiler.py 2018-04-19 15:37:42.892095000 +0200 -@@ -136,3 +136,12 @@ - - replace_method(UnixCCompiler, 'create_static_lib', - UnixCCompiler_create_static_lib) -+ -+def UnixCCompiler_library_option(self, lib): -+ if lib[0]=='-': -+ return lib -+ else: -+ return "-l" + lib -+ -+replace_method(UnixCCompiler, 'library_option', -+ UnixCCompiler_library_option) diff --git a/Golden_Repo/s/SciPy-Stack/numpy-1.14.2-xhost.patch b/Golden_Repo/s/SciPy-Stack/numpy-1.14.2-xhost.patch deleted file mode 100644 index 5c4519f2fbabf5f8741c799078335658ec641d82..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SciPy-Stack/numpy-1.14.2-xhost.patch +++ /dev/null @@ -1,24 +0,0 @@ -diff -ruN numpy-1.14.2.orig/numpy/distutils/fcompiler/intel.py 
numpy-1.14.2/numpy/distutils/fcompiler/intel.py ---- numpy-1.14.2.orig/numpy/distutils/fcompiler/intel.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/fcompiler/intel.py 2018-04-19 15:30:13.945734000 +0200 -@@ -61,7 +61,7 @@ - return ['-fp-model strict -O1 -{}'.format(mpopt)] - - def get_flags_arch(self): -- return [] -+ return ['-xHost'] - - def get_flags_linker_so(self): - opt = FCompiler.get_flags_linker_so(self) -diff -ruN numpy-1.14.2.orig/numpy/distutils/intelccompiler.py numpy-1.14.2/numpy/distutils/intelccompiler.py ---- numpy-1.14.2.orig/numpy/distutils/intelccompiler.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/intelccompiler.py 2018-04-19 15:30:43.659844000 +0200 -@@ -61,7 +61,7 @@ - v = self.get_version() - mpopt = 'openmp' if v and v < '15' else 'qopenmp' - self.cc_exe = ('icc -m64 -fPIC -fp-model strict -O3 ' -- '-fomit-frame-pointer -{}').format(mpopt) -+ '-fomit-frame-pointer -xHost -{}').format(mpopt) - compiler = self.cc_exe - - if platform.system() == 'Darwin': diff --git a/Golden_Repo/s/Score-P/Score-P-6.0-gompi-2020.eb b/Golden_Repo/s/Score-P/Score-P-6.0-gompi-2020.eb deleted file mode 100644 index 7037c78bff8437068491567a8832a1878eb47879..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-6.0-gompi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', - 'Score-P-6.0_no_PDT_for_CUDA.patch' -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-6.0-gpsmpi-2020.eb b/Golden_Repo/s/Score-P/Score-P-6.0-gpsmpi-2020.eb deleted file mode 100644 index a654979fef2bcd3fbb7b26bbc8e18c5d916e231a..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-6.0-gpsmpi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', - 'Score-P-6.0_no_PDT_for_CUDA.patch' -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-6.0-iimpi-2020.eb b/Golden_Repo/s/Score-P/Score-P-6.0-iimpi-2020.eb deleted file mode 100644 index 1dd32b425e26a853984b4c775d2176860f59cbd1..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-6.0-iimpi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', - 'Score-P-6.0_no_PDT_for_CUDA.patch' -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-6.0-ipsmpi-2020.eb b/Golden_Repo/s/Score-P/Score-P-6.0-ipsmpi-2020.eb deleted file mode 100644 index 79c00c01536bd236ca672c9a78aeebb827ad6f59..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-6.0-ipsmpi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', - 'Score-P-6.0_no_PDT_for_CUDA.patch' -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-6.0-npsmpic-2020.eb b/Golden_Repo/s/Score-P/Score-P-6.0-npsmpic-2020.eb deleted file mode 100644 index eb958fb47f5ac34bbe54a24869875ab2e0970bf2..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-6.0-npsmpic-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'npsmpic', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', - 'Score-P-6.0_no_PDT_for_CUDA.patch' -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-6.0_binutils_2.34_api_change.patch b/Golden_Repo/s/Score-P/Score-P-6.0_binutils_2.34_api_change.patch deleted file mode 100644 index 528ce9c44311a9896ef8c55111d3752c71a79be4..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-6.0_binutils_2.34_api_change.patch +++ /dev/null @@ -1,37 +0,0 @@ -Support libbfd API change introduced with binutils 2.34. Patch backported -from upstream repo; fix will be included in releases after v6.0. 
---- a/src/services/unwinding/scorep_unwinding_unify.c -+++ b/src/services/unwinding/scorep_unwinding_unify.c -@@ -28,6 +28,19 @@ - - #if HAVE( LIBBFD ) - #include <bfd.h> -+ -+#ifndef bfd_get_section_size -+#define bfd_get_section_size( asection ) bfd_section_size( asection ) -+#endif -+ -+#ifndef bfd_get_section_vma -+#define bfd_get_section_vma( abfd, asection ) bfd_section_vma( asection ) -+#endif -+ -+#ifndef bfd_get_section_flags -+#define bfd_get_section_flags( abfd, asection ) bfd_section_flags( asection ) -+#endif -+ - #endif - - #include "scorep_unwinding_region.h" ---- a/src/adapters/compiler/scorep_compiler_symbol_table_libbfd.c -+++ b/src/adapters/compiler/scorep_compiler_symbol_table_libbfd.c -@@ -43,6 +43,10 @@ - - #include <bfd.h> - -+#ifndef bfd_get_section -+#define bfd_get_section( asymbol ) bfd_asymbol_section( asymbol ) -+#endif -+ - #include <UTILS_Error.h> - #define SCOREP_DEBUG_MODULE_NAME COMPILER - #include <UTILS_Debug.h> diff --git a/Golden_Repo/s/Score-P/Score-P-6.0_no_PDT_for_CUDA.patch b/Golden_Repo/s/Score-P/Score-P-6.0_no_PDT_for_CUDA.patch deleted file mode 100644 index 7081ac5e01c491de0b64fc567dcb8340d4824920..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-6.0_no_PDT_for_CUDA.patch +++ /dev/null @@ -1,15 +0,0 @@ -diff --git a/src/tools/instrumenter/scorep_instrumenter_pdt.cpp b/src/tools/instrumenter/scorep_instrumenter_pdt.cpp -index da7ed2fae1a1e7be3f73f7d526baa0648484c0ce..ed26977b8824572803d7e6e78e473eff6bfcdc69 100644 ---- a/src/tools/instrumenter/scorep_instrumenter_pdt.cpp -+++ b/src/tools/instrumenter/scorep_instrumenter_pdt.cpp -@@ -74,6 +74,10 @@ SCOREP_Instrumenter_PdtAdapter::precompile( SCOREP_Instrumenter& instrum - SCOREP_Instrumenter_CmdLine& cmdLine, - const std::string& source_file ) - { -+ if ( is_cuda_file( source_file ) ) -+ { -+ return source_file; -+ } - std::string extension = get_extension( source_file ); - if ( is_fortran_file( source_file ) ) - { diff --git a/Golden_Repo/s/Score-P/Score-P-7.0-gompi-2020.eb b/Golden_Repo/s/Score-P/Score-P-7.0-gompi-2020.eb deleted file mode 100644 index 273004d9f7b35918193b1eef2e09ddfd20b9e958..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-7.0-gompi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2020'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared --with-machine-name=$SYSTEMNAME ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-7.0-gpsmpi-2020.eb b/Golden_Repo/s/Score-P/Score-P-7.0-gpsmpi-2020.eb deleted file mode 100644 index 0859927282e2cf57bddca5ef2454997cf13d76b8..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-7.0-gpsmpi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared --with-machine-name=$SYSTEMNAME ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-7.0-iimpi-2020.eb b/Golden_Repo/s/Score-P/Score-P-7.0-iimpi-2020.eb deleted file mode 100644 index 6d377d885fa9c85419c6496cda59c6886e87390f..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-7.0-iimpi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared --with-machine-name=$SYSTEMNAME ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-7.0-iompi-2020.eb b/Golden_Repo/s/Score-P/Score-P-7.0-iompi-2020.eb deleted file mode 100644 index 592032d1e95393370b682fcdf1548611693a7955..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-7.0-iompi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2020'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared --with-machine-name=$SYSTEMNAME ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-7.0-ipsmpi-2020.eb b/Golden_Repo/s/Score-P/Score-P-7.0-ipsmpi-2020.eb deleted file mode 100644 index 17ea67b5392ee433928f824dfbf52f7e3dd86657..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-7.0-ipsmpi-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared --with-machine-name=$SYSTEMNAME ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Score-P/Score-P-7.0-npsmpic-2020.eb b/Golden_Repo/s/Score-P/Score-P-7.0-npsmpic-2020.eb deleted file mode 100644 index 88905b8d102c0f56d94956fd204f71ca6f0a0776..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Score-P/Score-P-7.0-npsmpic-2020.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'npsmpic', 'version': '2020'} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared --with-machine-name=$SYSTEMNAME ' -# Enable CUDA support -configopts += '--with-libOpenCL=$EBROOTCUDA/targets/x86_64-linux' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Golden_Repo/s/Serf/Serf-1.3.9-GCCcore-10.3.0.eb b/Golden_Repo/s/Serf/Serf-1.3.9-GCCcore-10.3.0.eb deleted file mode 100644 index 1b9321d05973988df758cb453b9e56822869bee4..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Serf/Serf-1.3.9-GCCcore-10.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'SCons' -name = 'Serf' -version = '1.3.9' - -homepage = 'http://serf.apache.org/' -description = """The serf library is a high performance C-based HTTP client library - built upon the Apache Portable Runtime (APR) library -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://archive.apache.org/dist/%(namelower)s'] -sources = [SOURCELOWER_TAR_BZ2] -patches = ['Serf-%(version)s_python3_scons.patch'] - -builddependencies = [ - ('SCons', '3.1.2'), - ('Python', '3.8.5'), - ('binutils', '2.36.1'), -] - -dependencies = [ - ('APR', '1.7.0'), - ('APR-util', '1.6.1'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -buildopts = "APR=$EBROOTAPR/bin/apr-1-config APU=$EBROOTAPRMINUTIL/bin/apu-1-config" - -sanity_check_paths = { - 'files': ['include/serf-1/serf.h'] + - ['lib/libserf-1.%s' % x for x in ['a', 'so']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/Serf/Serf-1.3.9-GCCcore-9.3.0.eb b/Golden_Repo/s/Serf/Serf-1.3.9-GCCcore-9.3.0.eb deleted file mode 100644 index 8bd2d5a590d4822a4cfc7dd7ad616347647e98af..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Serf/Serf-1.3.9-GCCcore-9.3.0.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'SCons' -name = 'Serf' -version = '1.3.9' - -homepage = 'http://serf.apache.org/' -description = """The serf library is a high performance C-based HTTP client library - built upon the Apache Portable Runtime (APR) library -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://archive.apache.org/dist/%(namelower)s'] -sources = [SOURCELOWER_TAR_BZ2] - -builddependencies = [ - ('SCons', '3.1.2'), - ('binutils', '2.34'), -] - -dependencies = [ - ('APR', 
'1.7.0'), - ('APR-util', '1.6.1'), -] - -osdependencies = [('openssl-devel', 'libssl-dev', 'libopenssl-devel')] - -buildopts = "APR=$EBROOTAPR/bin/apr-1-config APU=$EBROOTAPRMINUTIL/bin/apu-1-config" - -sanity_check_paths = { - 'files': ['include/serf-1/serf.h'] + - ['lib/libserf-1.%s' % x for x in ['a', 'so']], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/Serf/Serf-1.3.9_python3_scons.patch b/Golden_Repo/s/Serf/Serf-1.3.9_python3_scons.patch deleted file mode 100644 index fd3f314495963fbb1c707ae021598e9d9e0091d3..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Serf/Serf-1.3.9_python3_scons.patch +++ /dev/null @@ -1,22 +0,0 @@ -# Makes scons script compatible with python3 -# Author: Mikael Öhman <micketeer@gmail.com> ---- SConstruct.orig 2020-10-19 00:08:59.165775221 +0200 -+++ SConstruct 2020-10-19 00:29:58.378134064 +0200 -@@ -166,7 +166,7 @@ - match = re.search('SERF_MAJOR_VERSION ([0-9]+).*' - 'SERF_MINOR_VERSION ([0-9]+).*' - 'SERF_PATCH_VERSION ([0-9]+)', -- env.File('serf.h').get_contents(), -+ str(env.File('serf.h').get_contents()), - re.DOTALL) - MAJOR, MINOR, PATCH = [int(x) for x in match.groups()] - env.Append(MAJOR=str(MAJOR)) -@@ -183,7 +183,7 @@ - - unknown = opts.UnknownVariables() - if unknown: -- print 'Warning: Used unknown variables:', ', '.join(unknown.keys()) -+ print('Warning: Used unknown variables:', ', '.join(unknown.keys())) - - apr = str(env['APR']) - apu = str(env['APU']) diff --git a/Golden_Repo/s/Shapely/Shapely-1.7.1-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/s/Shapely/Shapely-1.7.1-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 8df26f8973febb433ea6b7510a090143cd5115eb..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Shapely/Shapely-1.7.1-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'Shapely' -version = '1.7.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/Toblerity/Shapely' -description = """Shapely is a BSD-licensed Python package for manipulation and analysis of planar geometric objects. -It is based on the widely deployed GEOS (the engine of PostGIS) and JTS (from which GEOS is ported) libraries.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129'] - -dependencies = [ - ('Python', '3.8.5'), - ('GEOS', '3.8.1', versionsuffix), -] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -moduleclass = 'math' diff --git a/Golden_Repo/s/Shapely/Shapely-1.7.1-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/s/Shapely/Shapely-1.7.1-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 47db47fc91daae2c2c102039da8f217b7a6dd211..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Shapely/Shapely-1.7.1-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'Shapely' -version = '1.7.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/Toblerity/Shapely' -description = """Shapely is a BSD-licensed Python package for manipulation and analysis of planar geometric objects. 
-It is based on the widely deployed GEOS (the engine of PostGIS) and JTS (from which GEOS is ported) libraries.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] -checksums = ['1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129'] - -dependencies = [ - ('Python', '3.8.5'), - ('GEOS', '3.8.1', versionsuffix), -] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -moduleclass = 'math' diff --git a/Golden_Repo/s/Siesta/Siesta-4.0.2-gpsmkl-2020.eb b/Golden_Repo/s/Siesta/Siesta-4.0.2-gpsmkl-2020.eb deleted file mode 100644 index 78e6e5728c9c7c4161bf8d97b87b2b544de31050..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Siesta/Siesta-4.0.2-gpsmkl-2020.eb +++ /dev/null @@ -1,18 +0,0 @@ -name = 'Siesta' -version = '4.0.2' - -homepage = 'http://departments.icmab.es/leem/siesta' -description = """SIESTA is both a method and its computer program implementation, to perform efficient electronic - structure calculations and ab initio molecular dynamics simulations of molecules and solids.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['https://launchpad.net/siesta/%(version_major_minor)s/%(version)s/+download'] - -dependencies = [('netCDF-Fortran', '4.5.3')] - -moduleclass = 'phys' diff --git a/Golden_Repo/s/Siesta/Siesta-4.0.2-intel-2020.eb b/Golden_Repo/s/Siesta/Siesta-4.0.2-intel-2020.eb deleted file mode 100644 index 7501aec23964d638373e30e23b13ab1000f50bbf..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Siesta/Siesta-4.0.2-intel-2020.eb +++ /dev/null @@ -1,18 +0,0 @@ -name = 'Siesta' -version = '4.0.2' - -homepage = 'http://departments.icmab.es/leem/siesta' -description = """SIESTA is both a method and its computer program implementation, to perform efficient electronic - structure calculations and ab initio molecular dynamics simulations of molecules and solids.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['https://launchpad.net/siesta/%(version_major_minor)s/%(version)s/+download'] - -dependencies = [('netCDF-Fortran', '4.5.3')] - -moduleclass = 'phys' diff --git a/Golden_Repo/s/Siesta/Siesta-4.0.2-intel-para-2020.eb b/Golden_Repo/s/Siesta/Siesta-4.0.2-intel-para-2020.eb deleted file mode 100644 index 5f385c2adab5d152ba3c04da1da5def5371a73a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Siesta/Siesta-4.0.2-intel-para-2020.eb +++ /dev/null @@ -1,18 +0,0 @@ -name = 'Siesta' -version = '4.0.2' - -homepage = 'http://departments.icmab.es/leem/siesta' -description = """SIESTA is both a method and its computer program implementation, to perform efficient electronic - structure calculations and ab initio molecular dynamics simulations of molecules and solids.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['https://launchpad.net/siesta/%(version_major_minor)s/%(version)s/+download'] - -dependencies = [('netCDF-Fortran', '4.5.3')] - -moduleclass = 'phys' diff --git a/Golden_Repo/s/Silo/Silo-4.10.2-gpsmpi-2020.eb b/Golden_Repo/s/Silo/Silo-4.10.2-gpsmpi-2020.eb deleted file mode 100644 index 
215f8b08c0df2373bb98b7187ab056de5c91e7b4..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Silo/Silo-4.10.2-gpsmpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Silo' -version = '4.10.2' - -homepage = 'https://wci.llnl.gov/simulation/computer-codes/silo/' -description = 'Silo is a library for reading and writing a wide variety of scientific data to binary, disk files.' - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True} - -sources = ['%(namelower)s-%(version)s.tar.gz'] -source_urls = ['https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-%(version)s'] - -patches = ['silo_hdf5_1.8.13.patch'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('Szip', '2.1.1') -] - -configopts = [ - '--with-hdf5=$EBROOTHDF5/include,$EBROOTHDF5/lib --with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex', - '--with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex', -] - -sanity_check_paths = { - 'files': ['bin/browser', 'bin/silock', 'bin/silodiff', 'bin/silofile', 'lib/libsilo.a', 'lib/libsiloh5.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/s/Silo/Silo-4.10.2-iimpi-2020.eb b/Golden_Repo/s/Silo/Silo-4.10.2-iimpi-2020.eb deleted file mode 100644 index e16975b17bf4d16a21f55834df1c168281c29c3b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Silo/Silo-4.10.2-iimpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Silo' -version = '4.10.2' - -homepage = 'https://wci.llnl.gov/simulation/computer-codes/silo/' -description = 'Silo is a library for reading and writing a wide variety of scientific data to binary, disk files.' - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True} - -sources = ['%(namelower)s-%(version)s.tar.gz'] -source_urls = ['https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-%(version)s'] - -patches = ['silo_hdf5_1.8.13.patch'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('Szip', '2.1.1') -] - -configopts = [ - '--with-hdf5=$EBROOTHDF5/include,$EBROOTHDF5/lib --with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex', - '--with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex', -] - -sanity_check_paths = { - 'files': ['bin/browser', 'bin/silock', 'bin/silodiff', 'bin/silofile', 'lib/libsilo.a', 'lib/libsiloh5.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/s/Silo/Silo-4.10.2-ipsmpi-2020.eb b/Golden_Repo/s/Silo/Silo-4.10.2-ipsmpi-2020.eb deleted file mode 100644 index f2233935943862a9f2c960872248c7575f0ef416..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Silo/Silo-4.10.2-ipsmpi-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Silo' -version = '4.10.2' - -homepage = 'https://wci.llnl.gov/simulation/computer-codes/silo/' -description = 'Silo is a library for reading and writing a wide variety of scientific data to binary, disk files.' 
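A note on the Silo easyconfigs in this block: `configopts` is a list of two strings, which in EasyBuild triggers an iterated build, i.e. the whole configure/build/install cycle runs once per entry. Here the first pass configures against HDF5 (producing `libsiloh5`) and the second pass builds without it (plain `libsilo`), which is why the sanity check expects both `lib/libsilo.a` and `lib/libsiloh5.a`. The loop below is a rough sketch of that iteration with placeholder commands, not the framework's real code.

```python
configopts = [
    "--with-hdf5=$EBROOTHDF5/include,$EBROOTHDF5/lib --with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex",
    "--with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex",
]

# Sketch only: when configopts is a list, the whole cycle is repeated per entry.
for iteration, opts in enumerate(configopts, start=1):
    for step in ("./configure " + opts, "make", "make install"):
        print(f"iteration {iteration}: {step}")
```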
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'opt': True, 'pic': True, 'usempi': True} - -sources = ['%(namelower)s-%(version)s.tar.gz'] -source_urls = ['https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-%(version)s'] - -patches = ['silo_hdf5_1.8.13.patch'] - -dependencies = [ - ('HDF5', '1.10.6'), - ('Szip', '2.1.1') -] - -configopts = [ - '--with-hdf5=$EBROOTHDF5/include,$EBROOTHDF5/lib --with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex', - '--with-szlib=$EBROOTSZIP/lib --enable-shared --disable-silex', -] - -sanity_check_paths = { - 'files': ['bin/browser', 'bin/silock', 'bin/silodiff', 'bin/silofile', 'lib/libsilo.a', 'lib/libsiloh5.a'], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/s/Silo/silo_hdf5_1.8.13.patch b/Golden_Repo/s/Silo/silo_hdf5_1.8.13.patch deleted file mode 100644 index f4ac6336fae8c3d32aa7b989b2369875728c6139..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Silo/silo_hdf5_1.8.13.patch +++ /dev/null @@ -1,21 +0,0 @@ -diff -rupN silo-4.10.2.orig/src/hdf5_drv/silo_hdf5.c silo-4.10.2/src/hdf5_drv/silo_hdf5.c ---- silo-4.10.2.orig/src/hdf5_drv/silo_hdf5.c 2014-09-30 01:34:54.000000000 +0200 -+++ silo-4.10.2/src/hdf5_drv/silo_hdf5.c 2016-04-26 11:23:52.382400735 +0200 -@@ -4755,7 +4755,7 @@ db_hdf5_process_file_options(int opts_se - case DB_FILE_OPTS_H5_DEFAULT_MPIP: - { - #ifdef H5_HAVE_PARALLEL -- h5status |= H5Pset_fapl_mpiposix(retval, MPI_COMM_SELF, TRUE); -+ h5status |= H5Pset_fapl_mpio(retval, MPI_COMM_SELF, TRUE); - #else - H5Pclose(retval); - return db_perror("HDF5 MPI VFD", E_NOTENABLEDINBUILD, me); -@@ -5076,7 +5076,7 @@ db_hdf5_process_file_options(int opts_se - } - else - { -- h5status |= H5Pset_fapl_mpiposix(retval, mpi_comm, use_gpfs_hints); -+ h5status |= H5Pset_fapl_mpio(retval, mpi_comm, use_gpfs_hints); - } - #else - H5Pclose(retval); diff --git a/Golden_Repo/s/Singularity-Tools/Singularity-Tools-2020-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/s/Singularity-Tools/Singularity-Tools-2020-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index 0f29a6d749a1444e495aaf0759b4e3c9c1e4381a..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Singularity-Tools/Singularity-Tools-2020-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'Bundle' -name = 'Singularity-Tools' -version = '2020' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://gitlab.version.fz-juelich.de/hps-public/container-build-system-cli' -description = """Singularity-Tools contain a bunch of tools for Singularity, - e.g. the JSC Build System CLI or singularity-compose. -""" - -site_contacts = 'Ruben Simons <r.simons@fz-juelich.de>,Benedikt von St. 
Vieth <b.von.st.vieth@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -dependencies = [('Python', '3.8.5')] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('pyaml', '20.4.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/pyaml']), - ])), - ('semver', '2.10.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/semver']), - ])), - ('spython', '0.0.85', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/spython']), - ])), - ('singularity-compose', '0.0.20', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/singularity-compose']), - ('modulename', 'scompose'), - ])), - ('tabulate', '0.8.7', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/tabulate']), - ])), - ('sib', '0.0.1', dict(list(local_common_opts.items()) + [ - ('sources', ['client-0.0.1.tar.gz']), - ('modulename', 'singularitydb_client'), - ])), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/Singularity-Tools/Singularity-Tools-2020-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/s/Singularity-Tools/Singularity-Tools-2020-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 91417b456221ef221452fbe4bbf7734f294850e0..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Singularity-Tools/Singularity-Tools-2020-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'Bundle' -name = 'Singularity-Tools' -version = '2020' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://gitlab.version.fz-juelich.de/hps-public/container-build-system-cli' -description = """Singularity-Tools contain a bunch of tools for Singularity, - e.g. the JSC Build System CLI or singularity-compose. -""" - -site_contacts = 'Ruben Simons <r.simons@fz-juelich.de>,Benedikt von St. 
Vieth <b.von.st.vieth@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -dependencies = [('Python', '3.8.5')] - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('pyaml', '20.4.0', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/pyaml']), - ])), - ('semver', '2.10.2', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/semver']), - ])), - ('spython', '0.0.85', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/spython']), - ])), - ('singularity-compose', '0.0.20', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/singularity-compose']), - ('modulename', 'scompose'), - ])), - ('tabulate', '0.8.7', dict(list(local_common_opts.items()) + [ - ('source_urls', ['https://pypi.python.org/packages/source/p/tabulate']), - ])), - ('sib', '0.0.1', dict(list(local_common_opts.items()) + [ - ('sources', ['client-0.0.1.tar.gz']), - ('modulename', 'singularitydb_client'), - ])), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/SoX/SoX-14.4.2-GCCcore-10.3.0.eb b/Golden_Repo/s/SoX/SoX-14.4.2-GCCcore-10.3.0.eb deleted file mode 100644 index d10ae4604b2a3924b48d0fd00e9a8153a5ec5e12..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SoX/SoX-14.4.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'SoX' -version = '14.4.2' - -homepage = 'https://sourceforge.net/projects/sox' -description = """SoX is the Swiss Army Knife of sound processing utilities. It can convert audio files - to other popular audio file types and also apply sound effects and filters during the conversion.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['b45f598643ffbd8e363ff24d61166ccec4836fea6d3888881b8df53e3bb55f6c'] - -builddependencies = { - ('binutils', '2.36.1') -} - -sanity_check_paths = { - 'files': ['bin/play', 'bin/rec', 'bin/sox', 'bin/soxi', 'include/sox.h', - 'lib/libsox.a', 'lib/libsox.%s' % SHLIB_EXT, 'lib/pkgconfig/sox.pc'], - 'dirs': ['share/man'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/SoX/SoX-14.4.2-GCCcore-9.3.0.eb b/Golden_Repo/s/SoX/SoX-14.4.2-GCCcore-9.3.0.eb deleted file mode 100644 index 18a9acd580e17512f08f6b3fff7186aea426d971..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SoX/SoX-14.4.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'SoX' -version = '14.4.2' - -homepage = 'https://sourceforge.net/projects/sox' -description = """SoX is the Swiss Army Knife of sound processing utilities. 
It can convert audio files - to other popular audio file types and also apply sound effects and filters during the conversion.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [SOURCEFORGE_SOURCE] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['b45f598643ffbd8e363ff24d61166ccec4836fea6d3888881b8df53e3bb55f6c'] - -builddependencies = { - ('binutils', '2.34') -} - -sanity_check_paths = { - 'files': ['bin/play', 'bin/rec', 'bin/sox', 'bin/soxi', 'include/sox.h', - 'lib/libsox.a', 'lib/libsox.%s' % SHLIB_EXT, 'lib/pkgconfig/sox.pc'], - 'dirs': ['share/man'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/Subversion/Subversion-1.14.0-GCCcore-10.3.0.eb b/Golden_Repo/s/Subversion/Subversion-1.14.0-GCCcore-10.3.0.eb deleted file mode 100644 index acdb6624ee4eced3f3e84dacdcd83d79be7dc9b3..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Subversion/Subversion-1.14.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Subversion' -version = '1.14.0' - -homepage = 'http://subversion.apache.org/' -description = """Subversion is an open source version control system.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'http://apache.belnet.be/%(namelower)s', - 'http://www.eu.apache.org/dist/%(namelower)s', - 'http://www.us.apache.org/dist/%(namelower)s', -] -sources = [SOURCELOWER_TAR_BZ2] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('APR', '1.7.0'), - ('APR-util', '1.6.1'), - ('SQLite', '3.32.3'), - ('zlib', '1.2.11'), - ('Serf', '1.3.9'), -] - -configopts = "--with-apr=$EBROOTAPR/bin/apr-1-config --with-apr-util=$EBROOTAPRMINUTIL/bin/apu-1-config " -configopts += "--with-zlib=$EBROOTZLIB --with-serf=$EBROOTSERF --with-lz4=internal --with-utf8proc=internal" - -sanity_check_paths = { - 'files': ["bin/svn", "bin/svnversion"], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/Subversion/Subversion-1.14.0-GCCcore-9.3.0.eb b/Golden_Repo/s/Subversion/Subversion-1.14.0-GCCcore-9.3.0.eb deleted file mode 100644 index 6f2d99c35cf1e02b34ed9252fff03f1d27c46634..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Subversion/Subversion-1.14.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Subversion' -version = '1.14.0' - -homepage = 'http://subversion.apache.org/' -description = """Subversion is an open source version control system.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [ - 'http://apache.belnet.be/%(namelower)s', - 'http://www.eu.apache.org/dist/%(namelower)s', - 'http://www.us.apache.org/dist/%(namelower)s', -] -sources = [SOURCELOWER_TAR_BZ2] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('APR', '1.7.0'), - ('APR-util', '1.6.1'), - ('SQLite', '3.32.3'), - ('zlib', '1.2.11'), - ('Serf', '1.3.9'), -] - -configopts = "--with-apr=$EBROOTAPR/bin/apr-1-config --with-apr-util=$EBROOTAPRMINUTIL/bin/apu-1-config " -configopts += "--with-zlib=$EBROOTZLIB --with-serf=$EBROOTSERF --with-lz4=internal --with-utf8proc=internal" - -sanity_check_paths = { - 'files': ["bin/svn", "bin/svnversion"], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gomkl-2020-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gomkl-2020-CUDA.eb deleted file mode 100644 index 
063a4613742444b7a9eb22798d99045ad0a3e75f..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gomkl-2020-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gomkl-2021-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gomkl-2021-CUDA.eb deleted file mode 100644 index 9c3a96c86455d2cbeed4dc2ea6ebcc3e588a8bc7..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gomkl-2021-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.3', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gpsmkl-2020-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gpsmkl-2020-CUDA.eb deleted file mode 100644 index f1967a258d1284fff27ef595cf6fa6867a487b13..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gpsmkl-2020-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." 
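On the SuiteSparse CUDA variants above: the `NVCC_GENCODE` build option compiles the CUDA kernels for three compute capabilities, `sm_70` (Volta), `sm_75` (Turing) and `sm_80` (Ampere), so a single module serves the different GPU generations these easyconfigs target. A small helper like the following reproduces the same flag string; it is purely illustrative and not part of the easyconfig.

```python
def nvcc_gencode(capabilities):
    """Build the -gencode flags nvcc expects, one per compute capability."""
    return " ".join(
        f"-gencode=arch=compute_{cc},code=sm_{cc}" for cc in capabilities
    )

# The SuiteSparse easyconfigs above pass 70, 75 and 80 (Volta, Turing, Ampere).
buildopts = 'NVCC_GENCODE="%s"' % nvcc_gencode([70, 75, 80])
print(buildopts)
```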
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gpsmkl-2021-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gpsmkl-2021-CUDA.eb deleted file mode 100644 index 5d91521eaa3091f7b9424c3a90d3a904d8ffa95e..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-gpsmkl-2021-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.3', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-2020-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-2020-CUDA.eb deleted file mode 100644 index d5a7fd12d6b81e2402f2f5858f104e88f590dbcb..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-2020-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." 
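The `prebuildopts` line repeated in these SuiteSparse easyconfigs prepends an `export LD_LIBRARY_PATH=... && ` to the build command, so the demo binaries linked during `make` can resolve the freshly built `libsuitesparseconfig.so.5` from the build tree rather than failing before installation. Roughly, the effective shell command is the concatenation sketched below; the template values are placeholders for EasyBuild's `%(builddir)s` and `%(version)s`.

```python
# Placeholder values standing in for EasyBuild's %(builddir)s / %(version)s templates.
builddir, version = "/dev/shm/build", "5.7.1"
prebuildopts = f"export LD_LIBRARY_PATH={builddir}/SuiteSparse-{version}/lib:$LD_LIBRARY_PATH && "
buildopts = 'NVCC_GENCODE="-gencode=arch=compute_80,code=sm_80"'  # shortened here

# The build step effectively runs: <prebuildopts> make <buildopts>
build_cmd = f"{prebuildopts}make {buildopts}"
print(build_cmd)
```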
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-2021-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-2021-CUDA.eb deleted file mode 100644 index e0144c78145fd4c95f6689091cc53894cf4ed59d..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-2021-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.3', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-para-2020-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-para-2020-CUDA.eb deleted file mode 100644 index f0e09dddfd43a483cc6e247b9252f4e1fc95430d..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-para-2020-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-para-2021-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-para-2021-CUDA.eb deleted file mode 100644 index 37a64d94c3313b8f6b7efdad2eb121ed19e84233..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-intel-para-2021-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.3', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-iomkl-2020-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-iomkl-2020-CUDA.eb deleted file mode 100644 index 160c90e79e8b1fc5b3f6102e02007a704191b028..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-iomkl-2020-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0') -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-iomkl-2021-CUDA.eb b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-iomkl-2021-CUDA.eb deleted file mode 100644 index 56d9dde3b77d6d522c59bb29be54b59077785da5..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1-iomkl-2021-CUDA.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'SuiteSparse' -version = '5.7.1' -versionsuffix = '-CUDA' - -homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html' -description = "SuiteSparse is a collection of libraries manipulate sparse matrices." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'opt': True, 'unroll': True, 'pic': True} - -source_urls = ['https://github.com/DrTimothyAldenDavis/SuiteSparse/archive'] -sources = ['v%(version)s.tar.gz'] - -dependencies = [ - ('CUDA', '11.3', '', SYSTEM), - ('METIS', '5.1.0'), -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -patches = [ - 'SuiteSparse-5.7.1_JSC_GPUS.patch' -] - -buildopts = 'NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - - -# make sure that bin/demo can find libsuitesparseconfig.so.5 during build -prebuildopts = "export LD_LIBRARY_PATH=%(builddir)s/SuiteSparse-%(version)s/lib:$LD_LIBRARY_PATH && " - -modluafooter = 'add_property("arch","gpu")' - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1_JSC_GPUS.patch b/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1_JSC_GPUS.patch deleted file mode 100644 index a4ba6175414db3c318dbfce28da03627e7b5f7d8..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuiteSparse/SuiteSparse-5.7.1_JSC_GPUS.patch +++ /dev/null @@ -1,18 +0,0 @@ ---- SuiteSparse_config/SuiteSparse_config.mk.orig 2020-09-08 09:23:45.267015599 +0200 -+++ SuiteSparse_config/SuiteSparse_config.mk 2020-09-08 09:29:16.074006450 +0200 -@@ -243,12 +243,9 @@ - MAGMA_LIB = -L/opt/magma-2.4.0/lib/ -lmagma - NVCC = $(CUDA_PATH)/bin/nvcc - NVCCFLAGS = -Xcompiler -fPIC -O3 \ -- -gencode=arch=compute_30,code=sm_30 \ -- -gencode=arch=compute_35,code=sm_35 \ -- -gencode=arch=compute_50,code=sm_50 \ -- -gencode=arch=compute_53,code=sm_53 \ -- -gencode=arch=compute_53,code=sm_53 \ -- -gencode=arch=compute_60,code=compute_60 -+ -gencode=arch=compute_70,code=sm_70 \ -+ -gencode=arch=compute_75,code=sm_75 \ -+ -gencode=arch=compute_80,code=sm_80 - endif - - #--------------------------------------------------------------------------- diff --git a/Golden_Repo/s/SuperLU/SuperLU-5.2.2-gpsmkl-2020.eb b/Golden_Repo/s/SuperLU/SuperLU-5.2.2-gpsmkl-2020.eb deleted file mode 100644 index 
a09d37496ae7bd075a3b0cfe39362a54664192c1..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuperLU/SuperLU-5.2.2-gpsmkl-2020.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = "EB_SuperLU" - -name = 'SuperLU' -version = '5.2.2' - -homepage = 'https://crd-legacy.lbl.gov/~xiaoye/SuperLU/' -description = """SuperLU is a general purpose library for the direct solution of large, sparse, nonsymmetric systems - of linear equations on high performance machines.""" - -site_contacts = 'Filipe Guimaraes <f.guimaraes@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'pic': True, 'openmp': True} - -github_account = 'xiaoyeli' -source_urls = [GITHUB_LOWER_SOURCE] -sources = ["v%(version)s.tar.gz"] -checksums = ['470334a72ba637578e34057f46948495e601a5988a602604f5576367e606a28c'] - -builddependencies = [('CMake', '3.18.0')] - -configopts = "-DUSE_XSDK_DEFAULTS=true" - -sanity_check_paths = { - 'files': ['lib64/libsuperlu.a'], - 'dirs': ['include'] -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/SuperLU/SuperLU-5.2.2-intel-para-2020.eb b/Golden_Repo/s/SuperLU/SuperLU-5.2.2-intel-para-2020.eb deleted file mode 100644 index 428a6f58ad6b8b3eea79322525b7064f8ded4e08..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/SuperLU/SuperLU-5.2.2-intel-para-2020.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = "EB_SuperLU" - -name = 'SuperLU' -version = '5.2.2' - -homepage = 'https://crd-legacy.lbl.gov/~xiaoye/SuperLU/' -description = """SuperLU is a general purpose library for the direct solution of large, sparse, nonsymmetric systems - of linear equations on high performance machines.""" - -site_contacts = 'Filipe Guimaraes <f.guimaraes@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'pic': True, 'openmp': True} - -github_account = 'xiaoyeli' -source_urls = [GITHUB_LOWER_SOURCE] -sources = ["v%(version)s.tar.gz"] -checksums = ['470334a72ba637578e34057f46948495e601a5988a602604f5576367e606a28c'] - -builddependencies = [('CMake', '3.18.0')] - -configopts = "-DUSE_XSDK_DEFAULTS=true" - -sanity_check_paths = { - 'files': ['lib64/libsuperlu.a'], - 'dirs': ['include'] -} - -moduleclass = 'numlib' diff --git a/Golden_Repo/s/Szip/Szip-2.1.1-GCCcore-10.3.0.eb b/Golden_Repo/s/Szip/Szip-2.1.1-GCCcore-10.3.0.eb deleted file mode 100644 index 7f7dc8841883ec987b2cbbc373c05115f9930d8c..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Szip/Szip-2.1.1-GCCcore-10.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Szip' -version = '2.1.1' - -homepage = 'http://www.hdfgroup.org/doc_resource/SZIP/' -description = """Szip compression software, providing lossless compression of -scientific data. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://www.hdfgroup.org/ftp/lib-external/szip/%(version)s/src'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['897dda94e1d4bf88c91adeaad88c07b468b18eaf2d6125c47acac57e540904a9'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -configopts = "--with-pic" - -sanity_check_paths = { - 'files': ["lib/libsz.a", "lib/libsz.%s" % SHLIB_EXT] + - ["include/%s" % x for x in ["ricehdf.h", "szip_adpt.h", "szlib.h"]], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/Szip/Szip-2.1.1-GCCcore-9.3.0.eb b/Golden_Repo/s/Szip/Szip-2.1.1-GCCcore-9.3.0.eb deleted file mode 100644 index 48c150162c299ea12b60a32c279f0d243ee81f1d..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/Szip/Szip-2.1.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Szip' -version = '2.1.1' - -homepage = 'http://www.hdfgroup.org/doc_resource/SZIP/' -description = """Szip compression software, providing lossless compression of -scientific data. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://www.hdfgroup.org/ftp/lib-external/szip/%(version)s/src'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['897dda94e1d4bf88c91adeaad88c07b468b18eaf2d6125c47acac57e540904a9'] - -builddependencies = [ - ('binutils', '2.34') -] - -configopts = "--with-pic" - -sanity_check_paths = { - 'files': ["lib/libsz.a", "lib/libsz.%s" % SHLIB_EXT] + - ["include/%s" % x for x in ["ricehdf.h", "szip_adpt.h", "szlib.h"]], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/s/scikit/scikit-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/s/scikit/scikit-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index b8d05baaac42b4d9466180d326fdcca01e6dd7e5..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/scikit/scikit-2020-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'Bundle' -name = 'scikit' -version = '2020' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://www.scipy.org/scikits.html' -description = """SciKits (short for SciPy Toolkits), are add-on packages for SciPy, hosted and developed separately and -independently from the main SciPy distribution.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('dask', '2.22.0', versionsuffix), - ('SciPy-Stack', version, versionsuffix), - ('CMake', '3.18.0'), -] - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('joblib', '0.17.0', { - 'source_urls': ['https://pypi.python.org/packages/source/j/joblib'], - }), - ('scikit-learn', '0.23.2', { - 'source_urls': ['https://pypi.python.org/packages/source/s/scikit-learn'], - 'modulename': 'sklearn', - }), - ('PyWavelets', '1.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/PyWavelets'], - 'modulename': 'pywt', - }), - ('imageio', '2.9.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/imageio'], - }), - ('tifffile', '2020.10.1', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tifffile'], - }), - ('scikit-image', '0.17.2', { - 
'source_urls': ['https://pypi.python.org/packages/source/s/scikit-image'], - 'modulename': 'skimage', - }), - ('distro', '1.5.0', { - 'source_urls': ['https://pypi.python.org/packages/source/d/distro'], - 'checksums': [('sha256', '0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92')], - }), - ('scikit-build', '0.11.1', { - 'source_urls': ['https://pypi.python.org/packages/source/s/scikit-build'], - 'checksums': [('sha256', 'da40dfd69b2456fad1349a894b90180b43712152b8a85d2a00f4ae2ce8ac9a5c')], - 'modulename': 'skbuild', - }), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'vis' diff --git a/Golden_Repo/s/scikit/scikit-2021-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/s/scikit/scikit-2021-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index e45b7a5033aff22c4e951221ab11531c2bb97378..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/scikit/scikit-2021-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'Bundle' -name = 'scikit' -version = '2021' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://www.scipy.org/scikits.html' -description = """SciKits (short for SciPy Toolkits), are add-on packages for SciPy, hosted and developed separately and -independently from the main SciPy distribution.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -dependencies = [ - ('Python', '3.8.5'), - ('dask', '2.22.0', versionsuffix), - ('SciPy-Stack', version, versionsuffix), - ('CMake', '3.18.0', '', SYSTEM), -] - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_filter = ('python -c "import %(ext_name)s"', '') -exts_download_dep_fail = True - -exts_list = [ - ('joblib', '0.17.0', { - 'source_urls': ['https://pypi.python.org/packages/source/j/joblib'], - }), - ('scikit-learn', '0.23.2', { - 'source_urls': ['https://pypi.python.org/packages/source/s/scikit-learn'], - 'modulename': 'sklearn', - }), - ('PyWavelets', '1.1.1', { - 'source_urls': ['https://pypi.python.org/packages/source/p/PyWavelets'], - 'modulename': 'pywt', - }), - ('imageio', '2.9.0', { - 'source_urls': ['https://pypi.python.org/packages/source/i/imageio'], - }), - ('tifffile', '2020.10.1', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tifffile'], - }), - ('scikit-image', '0.17.2', { - 'source_urls': ['https://pypi.python.org/packages/source/s/scikit-image'], - 'modulename': 'skimage', - }), - ('distro', '1.5.0', { - 'source_urls': ['https://pypi.python.org/packages/source/d/distro'], - 'checksums': [('sha256', '0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92')], - }), - ('scikit-build', '0.11.1', { - 'source_urls': ['https://pypi.python.org/packages/source/s/scikit-build'], - 'checksums': [('sha256', 'da40dfd69b2456fad1349a894b90180b43712152b8a85d2a00f4ae2ce8ac9a5c')], - 'modulename': 'skbuild', - }), -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -moduleclass = 'vis' diff --git a/Golden_Repo/s/snappy/snappy-1.1.8-GCCcore-10.3.0.eb b/Golden_Repo/s/snappy/snappy-1.1.8-GCCcore-10.3.0.eb deleted file mode 100644 index 1a2bdcfe754c70dff9a9ad34a8460ba76ca9ffbd..0000000000000000000000000000000000000000 --- 
a/Golden_Repo/s/snappy/snappy-1.1.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'snappy' -version = '1.1.8' - -homepage = 'https://github.com/google/snappy' -description = """Snappy is a compression/decompression library. It does not aim -for maximum compression, or compatibility with any other compression library; -instead, it aims for very high speeds and reasonable compression.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/google/snappy/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), -] - -configopts = ['', '-DBUILD_SHARED_LIBS=ON'] - -sanity_check_paths = { - 'files': ['lib64/libsnappy.a', 'lib64/libsnappy.%s' % SHLIB_EXT, 'include/snappy.h'], - 'dirs': [''] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/snappy/snappy-1.1.8-GCCcore-9.3.0.eb b/Golden_Repo/s/snappy/snappy-1.1.8-GCCcore-9.3.0.eb deleted file mode 100644 index 243e14872804207a6dc07a827ef224289031810b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/snappy/snappy-1.1.8-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'snappy' -version = '1.1.8' - -homepage = 'https://github.com/google/snappy' -description = """Snappy is a compression/decompression library. It does not aim -for maximum compression, or compatibility with any other compression library; -instead, it aims for very high speeds and reasonable compression.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/google/snappy/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), -] - -configopts = ['', '-DBUILD_SHARED_LIBS=ON'] - -sanity_check_paths = { - 'files': ['lib64/libsnappy.a', 'lib64/libsnappy.%s' % SHLIB_EXT, 'include/snappy.h'], - 'dirs': [''] -} - -moduleclass = 'lib' diff --git a/Golden_Repo/s/spdlog/spdlog-1.8.2-GCCcore-10.3.0.eb b/Golden_Repo/s/spdlog/spdlog-1.8.2-GCCcore-10.3.0.eb deleted file mode 100644 index 8e4fc840e5c021e8c3f57c44c64dbe89b6fc3330..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/spdlog/spdlog-1.8.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'spdlog' -version = '1.8.2' - -homepage = "https://github.com/gabime/spdlog" -description = """Very fast, header-only/compiled, C++ logging library. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/gabime/spdlog/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('binutils', '2.36.1') -] - -sanity_check_paths = { - 'files': [], - 'dirs': [('include', 'lib64')] -} diff --git a/Golden_Repo/s/spdlog/spdlog-1.8.2-GCCcore-9.3.0.eb b/Golden_Repo/s/spdlog/spdlog-1.8.2-GCCcore-9.3.0.eb deleted file mode 100644 index ba5d0faeeb93c8025ccdc280286cfd3c80f9c751..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/spdlog/spdlog-1.8.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,26 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'spdlog' -version = '1.8.2' - -homepage = "https://github.com/gabime/spdlog" -description = """Very fast, header-only/compiled, C++ logging library. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/gabime/spdlog/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34') -] - -sanity_check_paths = { - 'files': [], - 'dirs': [('include', 'lib64')] -} diff --git a/Golden_Repo/s/sprng/sprng-1-gompi-2020.eb b/Golden_Repo/s/sprng/sprng-1-gompi-2020.eb deleted file mode 100644 index 41cdf74203c443d1f5fe7df192fb598d7e9bc4af..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-1-gompi-2020.eb +++ /dev/null @@ -1,103 +0,0 @@ -easyblock = "MakeCp" - -name = 'sprng' -version = '1' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 1 has been installed as module in - -$EBROOTSPRNG or $SPRNG1_ROOT - -This version has a different library for each random number generator. -""" - -examples = """ -Examples can be found in $EBROOTSPRNG/EXAMPLES -To compile and execute the examples say - -cp -r $EBROOTSPRNG/EXAMPLES . -make all - -and for execution: - -./batchseq for the sequential examples -./batchmpi on an interactive node for the parallel examples - -For more information read EXAMPLES/README and README -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['sprng.tar.gz'] - -patches = [ - 'sprng1.patch', - 'sprng1_2.patch', -] - -dependencies = [ - ('GMP', '6.2.0') -] - -files_to_copy = [ - (['lib/libcmrg.a', - 'lib/liblcg.a', - 'lib/liblcg64.a', - 'lib/liblfg.a', - 'lib/libmlfg.a', - 'lib/libsprngtest.a'], - 'lib'), - (['include/interface.h', - 'include/sprng.h', - 'include/sprng_f.h'], - 'include'), - (['EXAMPLES/README', - 'EXAMPLES/*.c', - 'EXAMPLES/*.C', - 'EXAMPLES/*.F', - 'EXAMPLES/batch*', - 'EXAMPLES/*in', - 'EXAMPLES/Makefile'], - 'EXAMPLES'), - (['DOCS/README', - 'DOCS/sprng.html.tar.Z'], - 'DOCS'), - (['README', - 'VERSION', - 'CHANGES.TEXT'], - 'share') -] - -sanity_check_paths = { - 'files': ["lib/libcmrg.a", "lib/liblcg.a", "lib/liblcg64.a", "lib/liblfg.a", "lib/libmlfg.a", "lib/libsprngtest.a"], - 'dirs': ["include"], -} - -postinstallcmds = [ - "chmod 644 %(installdir)s/EXAMPLES/README", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 644 %(installdir)s/EXAMPLES/*.c", - "chmod 644 %(installdir)s/EXAMPLES/*.C", - "chmod 644 %(installdir)s/EXAMPLES/*.F", - "chmod 644 %(installdir)s/EXAMPLES/*in", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 755 %(installdir)s/EXAMPLES/batch*", - "chmod 644 %(installdir)s/include/*", - "chmod 644 %(installdir)s/share/*", - "chmod 644 %(installdir)s/DOCS/*", - "rm %(installdir)s/EXAMPLES/*.orig" -] - -modextravars = { - 'SPRNG1_ROOT': '%(installdir)s', - 'SPRNG1_LIB': '%(installdir)s/lib', - 'SPRNG1_INCLUDE': '%(installdir)s/include' -} - -parallel = 1 - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-1-gpsmpi-2020.eb b/Golden_Repo/s/sprng/sprng-1-gpsmpi-2020.eb deleted file mode 100644 index eb4824332a237c103eae2aeb967e848de0f9e404..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-1-gpsmpi-2020.eb +++ /dev/null @@ -1,103 +0,0 @@ -easyblock = "MakeCp" - -name = 'sprng' -version = '1' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 1 has been installed as module in - -$EBROOTSPRNG or $SPRNG1_ROOT - -This version has a different library for each random number generator. -""" - -examples = """ -Examples can be found in $EBROOTSPRNG/EXAMPLES -To compile and execute the examples say - -cp -r $EBROOTSPRNG/EXAMPLES . -make all - -and for execution: - -./batchseq for the sequential examples -./batchmpi on an interactive node for the parallel examples - -For more information read EXAMPLES/README and README -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['sprng.tar.gz'] - -patches = [ - 'sprng1.patch', - 'sprng1_2.patch', -] - -dependencies = [ - ('GMP', '6.2.0') -] - -files_to_copy = [ - (['lib/libcmrg.a', - 'lib/liblcg.a', - 'lib/liblcg64.a', - 'lib/liblfg.a', - 'lib/libmlfg.a', - 'lib/libsprngtest.a'], - 'lib'), - (['include/interface.h', - 'include/sprng.h', - 'include/sprng_f.h'], - 'include'), - (['EXAMPLES/README', - 'EXAMPLES/*.c', - 'EXAMPLES/*.C', - 'EXAMPLES/*.F', - 'EXAMPLES/batch*', - 'EXAMPLES/*in', - 'EXAMPLES/Makefile'], - 'EXAMPLES'), - (['DOCS/README', - 'DOCS/sprng.html.tar.Z'], - 'DOCS'), - (['README', - 'VERSION', - 'CHANGES.TEXT'], - 'share') -] - -sanity_check_paths = { - 'files': ["lib/libcmrg.a", "lib/liblcg.a", "lib/liblcg64.a", "lib/liblfg.a", "lib/libmlfg.a", "lib/libsprngtest.a"], - 'dirs': ["include"], -} - -postinstallcmds = [ - "chmod 644 %(installdir)s/EXAMPLES/README", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 644 %(installdir)s/EXAMPLES/*.c", - "chmod 644 %(installdir)s/EXAMPLES/*.C", - "chmod 644 %(installdir)s/EXAMPLES/*.F", - "chmod 644 %(installdir)s/EXAMPLES/*in", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 755 %(installdir)s/EXAMPLES/batch*", - "chmod 644 %(installdir)s/include/*", - "chmod 644 %(installdir)s/share/*", - "chmod 644 %(installdir)s/DOCS/*", - "rm %(installdir)s/EXAMPLES/*.orig" -] - -modextravars = { - 'SPRNG1_ROOT': '%(installdir)s', - 'SPRNG1_LIB': '%(installdir)s/lib', - 'SPRNG1_INCLUDE': '%(installdir)s/include' -} - -parallel = 1 - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-1-iimpi-2020.eb b/Golden_Repo/s/sprng/sprng-1-iimpi-2020.eb deleted file mode 100644 index 4b30c175e48e60d0477c8fb5fa0d869c175d90e5..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-1-iimpi-2020.eb +++ /dev/null @@ -1,103 +0,0 @@ -easyblock = "MakeCp" - -name = 'sprng' -version = '1' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 1 has been installed as module in - -$EBROOTSPRNG or $SPRNG1_ROOT - -This version has a different library for each random number generator. -""" - -examples = """ -Examples can be found in $EBROOTSPRNG/EXAMPLES -To compile and execute the examples say - -cp -r $EBROOTSPRNG/EXAMPLES . -make all - -and for execution: - -./batchseq for the sequential examples -./batchmpi on an interactive node for the parallel examples - -For more information read EXAMPLES/README and README -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['sprng.tar.gz'] - -patches = [ - 'sprng1.patch', - 'sprng1_2.patch', -] - -dependencies = [ - ('GMP', '6.2.0') -] - -files_to_copy = [ - (['lib/libcmrg.a', - 'lib/liblcg.a', - 'lib/liblcg64.a', - 'lib/liblfg.a', - 'lib/libmlfg.a', - 'lib/libsprngtest.a'], - 'lib'), - (['include/interface.h', - 'include/sprng.h', - 'include/sprng_f.h'], - 'include'), - (['EXAMPLES/README', - 'EXAMPLES/*.c', - 'EXAMPLES/*.C', - 'EXAMPLES/*.F', - 'EXAMPLES/batch*', - 'EXAMPLES/*in', - 'EXAMPLES/Makefile'], - 'EXAMPLES'), - (['DOCS/README', - 'DOCS/sprng.html.tar.Z'], - 'DOCS'), - (['README', - 'VERSION', - 'CHANGES.TEXT'], - 'share') -] - -sanity_check_paths = { - 'files': ["lib/libcmrg.a", "lib/liblcg.a", "lib/liblcg64.a", "lib/liblfg.a", "lib/libmlfg.a", "lib/libsprngtest.a"], - 'dirs': ["include"], -} - -postinstallcmds = [ - "chmod 644 %(installdir)s/EXAMPLES/README", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 644 %(installdir)s/EXAMPLES/*.c", - "chmod 644 %(installdir)s/EXAMPLES/*.C", - "chmod 644 %(installdir)s/EXAMPLES/*.F", - "chmod 644 %(installdir)s/EXAMPLES/*in", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 755 %(installdir)s/EXAMPLES/batch*", - "chmod 644 %(installdir)s/include/*", - "chmod 644 %(installdir)s/share/*", - "chmod 644 %(installdir)s/DOCS/*", - "rm %(installdir)s/EXAMPLES/*.orig" -] - -modextravars = { - 'SPRNG1_ROOT': '%(installdir)s', - 'SPRNG1_LIB': '%(installdir)s/lib', - 'SPRNG1_INCLUDE': '%(installdir)s/include' -} - -parallel = 1 - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-1-iompi-2020.eb b/Golden_Repo/s/sprng/sprng-1-iompi-2020.eb deleted file mode 100644 index a9673086a3e81c7c3cc10e38ec5ef8abe0e7840b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-1-iompi-2020.eb +++ /dev/null @@ -1,103 +0,0 @@ -easyblock = "MakeCp" - -name = 'sprng' -version = '1' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 1 has been installed as module in - -$EBROOTSPRNG or $SPRNG1_ROOT - -This version has a different library for each random number generator. -""" - -examples = """ -Examples can be found in $EBROOTSPRNG/EXAMPLES -To compile and execute the examples say - -cp -r $EBROOTSPRNG/EXAMPLES . -make all - -and for execution: - -./batchseq for the sequential examples -./batchmpi on an interactive node for the parallel examples - -For more information read EXAMPLES/README and README -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['sprng.tar.gz'] - -patches = [ - 'sprng1.patch', - 'sprng1_2.patch', -] - -dependencies = [ - ('GMP', '6.2.0') -] - -files_to_copy = [ - (['lib/libcmrg.a', - 'lib/liblcg.a', - 'lib/liblcg64.a', - 'lib/liblfg.a', - 'lib/libmlfg.a', - 'lib/libsprngtest.a'], - 'lib'), - (['include/interface.h', - 'include/sprng.h', - 'include/sprng_f.h'], - 'include'), - (['EXAMPLES/README', - 'EXAMPLES/*.c', - 'EXAMPLES/*.C', - 'EXAMPLES/*.F', - 'EXAMPLES/batch*', - 'EXAMPLES/*in', - 'EXAMPLES/Makefile'], - 'EXAMPLES'), - (['DOCS/README', - 'DOCS/sprng.html.tar.Z'], - 'DOCS'), - (['README', - 'VERSION', - 'CHANGES.TEXT'], - 'share') -] - -sanity_check_paths = { - 'files': ["lib/libcmrg.a", "lib/liblcg.a", "lib/liblcg64.a", "lib/liblfg.a", "lib/libmlfg.a", "lib/libsprngtest.a"], - 'dirs': ["include"], -} - -postinstallcmds = [ - "chmod 644 %(installdir)s/EXAMPLES/README", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 644 %(installdir)s/EXAMPLES/*.c", - "chmod 644 %(installdir)s/EXAMPLES/*.C", - "chmod 644 %(installdir)s/EXAMPLES/*.F", - "chmod 644 %(installdir)s/EXAMPLES/*in", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 755 %(installdir)s/EXAMPLES/batch*", - "chmod 644 %(installdir)s/include/*", - "chmod 644 %(installdir)s/share/*", - "chmod 644 %(installdir)s/DOCS/*", - "rm %(installdir)s/EXAMPLES/*.orig" -] - -modextravars = { - 'SPRNG1_ROOT': '%(installdir)s', - 'SPRNG1_LIB': '%(installdir)s/lib', - 'SPRNG1_INCLUDE': '%(installdir)s/include' -} - -parallel = 1 - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-1-ipsmpi-2020.eb b/Golden_Repo/s/sprng/sprng-1-ipsmpi-2020.eb deleted file mode 100644 index 726b715ec3dcc249d9fd25b5caf43fba2ad2f901..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-1-ipsmpi-2020.eb +++ /dev/null @@ -1,103 +0,0 @@ -easyblock = "MakeCp" - -name = 'sprng' -version = '1' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 1 has been installed as module in - -$EBROOTSPRNG or $SPRNG1_ROOT - -This version has a different library for each random number generator. -""" - -examples = """ -Examples can be found in $EBROOTSPRNG/EXAMPLES -To compile and execute the examples say - -cp -r $EBROOTSPRNG/EXAMPLES . -make all - -and for execution: - -./batchseq for the sequential examples -./batchmpi on an interactive node for the parallel examples - -For more information read EXAMPLES/README and README -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['sprng.tar.gz'] - -patches = [ - 'sprng1.patch', - 'sprng1_2.patch', -] - -dependencies = [ - ('GMP', '6.2.0') -] - -files_to_copy = [ - (['lib/libcmrg.a', - 'lib/liblcg.a', - 'lib/liblcg64.a', - 'lib/liblfg.a', - 'lib/libmlfg.a', - 'lib/libsprngtest.a'], - 'lib'), - (['include/interface.h', - 'include/sprng.h', - 'include/sprng_f.h'], - 'include'), - (['EXAMPLES/README', - 'EXAMPLES/*.c', - 'EXAMPLES/*.C', - 'EXAMPLES/*.F', - 'EXAMPLES/batch*', - 'EXAMPLES/*in', - 'EXAMPLES/Makefile'], - 'EXAMPLES'), - (['DOCS/README', - 'DOCS/sprng.html.tar.Z'], - 'DOCS'), - (['README', - 'VERSION', - 'CHANGES.TEXT'], - 'share') -] - -sanity_check_paths = { - 'files': ["lib/libcmrg.a", "lib/liblcg.a", "lib/liblcg64.a", "lib/liblfg.a", "lib/libmlfg.a", "lib/libsprngtest.a"], - 'dirs': ["include"], -} - -postinstallcmds = [ - "chmod 644 %(installdir)s/EXAMPLES/README", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 644 %(installdir)s/EXAMPLES/*.c", - "chmod 644 %(installdir)s/EXAMPLES/*.C", - "chmod 644 %(installdir)s/EXAMPLES/*.F", - "chmod 644 %(installdir)s/EXAMPLES/*in", - "chmod 644 %(installdir)s/EXAMPLES/Makefile", - "chmod 755 %(installdir)s/EXAMPLES/batch*", - "chmod 644 %(installdir)s/include/*", - "chmod 644 %(installdir)s/share/*", - "chmod 644 %(installdir)s/DOCS/*", - "rm %(installdir)s/EXAMPLES/*.orig" -] - -modextravars = { - 'SPRNG1_ROOT': '%(installdir)s', - 'SPRNG1_LIB': '%(installdir)s/lib', - 'SPRNG1_INCLUDE': '%(installdir)s/include' -} - -parallel = 1 - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-5-examples.patch b/Golden_Repo/s/sprng/sprng-5-examples.patch deleted file mode 100644 index 3f60d9248e8cf11f75d2eb94cd9ebb5c3d9e2b5f..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-5-examples.patch +++ /dev/null @@ -1,270 +0,0 @@ ---- sprng5/README 2015-10-26 16:56:55.000000000 +0100 -+++ sprng5_ok/README 2020-10-21 16:38:36.199551000 +0200 -@@ -1 +1,2 @@ --For technical assistance in installing or using SPRNG, please send email to sprng@cs.fsu.edu -\ No newline at end of file -+For technical assistance in installing or using SPRNG, please send email to sprng@cs.fsu.edu -+ ---- sprng5/EXAMPLES/F77/Makefile_1 1970-01-01 01:00:00.000000000 +0100 -+++ sprng5_ok/EXAMPLES/F77/Makefile_1 2020-10-21 16:43:33.589520000 +0200 -@@ -0,0 +1,62 @@ -+########################################################################## -+ -+SHELL = /bin/sh -+MPIDEF = -DSPRNG_MPI -+ -+CC = mpicc -+CLD = $(CC) -+F77 = mpif77 -+F77LD = $(F77) -+FFXN = -DAdd_ -+FSUFFIX = F -+CXX = mpicxx -+CXXLD = $(CXX) -+ -+DEFS = -DHAVE_CONFIG_H -DLONG64=long -+CFLAGS = -O3 -DLittleEndian $(MPIDEF) -+CLDFLAGS = -O3 -+ -+FFLAGS = -O2 $(MPIDEF) -DPOINTER_SIZE=8 -DLONG64=long -DINTEGER_STAR_8 -+F77LDFLAGS = -O3 -+ -+CPP = cpp -P -DPOINTER_SIZE=8 -+ -+LIBDIR = $(EBROOTSPRNG)/lib -+INCDIR = $(EBROOTSPRNG)/include -+C++LIBS = -lstdc++ -lm -lpthread -+ -+########################################################################## -+ -+FORTRAN = convertf pif-simple seedf-simple seedf simplef-simple spawnf \ -+ sprngf-simple sprngf subroutinef -+ -+all : $(FORTRAN) -+ -+convertf : convertf.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o convertf convertf.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+pif-simple : pif-simple.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o pif-simple pif-simple.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ 
-+seedf-simple : seedf-simple.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o seedf-simple seedf-simple.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+seedf : seedf.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o seedf seedf.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+simplef-simple : simplef-simple.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o simplef-simple simplef-simple.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+spawnf : spawnf.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o spawnf spawnf.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+sprngf-simple : sprngf-simple.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o sprngf-simple sprngf-simple.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+sprngf : sprngf.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o sprngf sprngf.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+ -+subroutinef : subroutinef.F -+ $(F77) $(FFLAGS) -I$(INCDIR) -o subroutinef subroutinef.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ ---- sprng5/EXAMPLES/Makefile_cxx 1970-01-01 01:00:00.000000000 +0100 -+++ sprng5_ok/EXAMPLES/Makefile_cxx 2020-10-21 16:46:08.468251000 +0200 -@@ -0,0 +1,60 @@ -+##### -+ -+SHELL = /bin/sh -+MPIDEF = -DSPRNG_MPI -+ -+CC = mpicc -+CLD = $(CC) -+F77 = mpif90 -+F77LD = $(F77) -+FFXN = -DAdd_ -+FSUFFIX = F -+CXX = mpicxx -+CXXLD = $(CXX) -+ -+DEFS = -DHAVE_CONFIG_H -DLONG64=long -+CFLAGS = -O3 -DLittleEndian $(MPIDEF) -+CLDFLAGS = -O3 -+ -+FFLAGS = -O2 $(MPIDEF) -DPOINTER_SIZE=8 -DLONG64=long -DINTEGER_STAR_8 -+F77LDFLAGS = -O3 -+ -+CPP = cpp -P -DPOINTER_SIZE=8 -+ -+LIBDIR = $(EBROOTSPRNG)/lib -+INCDIR = $(EBROOTSPRNG)/include -+ -+########################################################################## -+ -+EX = sprng seed sprng-simple simple-simple seed-simple \ -+ spawn convert displaybytes pi-simple -+ -+all : $(EX) -+ -+sprng : sprng.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o sprng sprng.cpp -L$(LIBDIR) -lsprng -+ -+seed : seed.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o seed seed.cpp -L$(LIBDIR) -lsprng -+ -+sprng-simple : sprng-simple.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o sprng-simple sprng-simple.cpp -L$(LIBDIR) -lsprng -+ -+simple-simple : simple-simple.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o simple-simple simple-simple.cpp -L$(LIBDIR) -lsprng -+ -+seed-simple : seed-simple.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o seed-simple seed-simple.cpp -L$(LIBDIR) -lsprng -+ -+spawn : spawn.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o spawn spawn.cpp -L$(LIBDIR) -lsprng -+ -+convert : convert.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o convert convert.cpp -L$(LIBDIR) -lsprng -+ -+displaybytes : displaybytes.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o displaybytes displaybytes.cpp -L$(LIBDIR) -lsprng -+ -+pi-simple : pi-simple.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o pi-simple pi-simple.cpp -L$(LIBDIR) -lsprng -+ ---- sprng5/EXAMPLES/mpisprng/F77/Makefile_1 1970-01-01 01:00:00.000000000 +0100 -+++ sprng5_ok/EXAMPLES/mpisprng/F77/Makefile_1 2020-10-21 16:49:29.865976000 +0200 -@@ -0,0 +1,64 @@ -+########################################################################## -+ -+SHELL = /bin/sh -+MPIDEF = -DSPRNG_MPI -+ -+CC = mpicc -+CLD = $(CC) -+F77 = mpif77 -+F77LD = $(F77) -+FFXN = -DAdd_ -+FSUFFIX = F -+CXX = mpicxx -+CXXLD = $(CXX) -+ -+DEFS = -DHAVE_CONFIG_H -DLONG64=long -+CFLAGS = -O3 -DLittleEndian $(MPIDEF) -+CLDFLAGS = -O3 -+ -+FFLAGS = -O2 $(MPIDEF) -DPOINTER_SIZE=8 -DLONG64=long -DINTEGER_STAR_8 -+F77LDFLAGS = -O3 -+ -+CPP = cpp -P -DPOINTER_SIZE=8 -+ -+LIBDIR = $(EBROOTSPRNG)/lib -+INCDIR = $(EBROOTSPRNG)/include -+INCLUDE = -I$(INCDIR) -I../../F77 -+C++LIBS = -lstdc++ -lm -lpthread -+ -+########################################################################## -+ 
-+MPIFORTRAN = 2streamsf_mpi fsprngf-simple_mpi fsprngf_mpi \ -+ messagef-simple_mpi messagef_mpi \ -+ seedf-simple_mpi seedf_mpi \ -+ sprngf-simple_mpi sprngf_mpi -+ -+all : $(MPIFORTRAN) -+ -+fsprngf-simple_mpi : fsprngf-simple_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o fsprngf-simple_mpi fsprngf-simple_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+fsprngf_mpi : fsprngf_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o fsprngf_mpi fsprngf_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+messagef-simple_mpi : messagef-simple_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o messagef-simple_mpi messagef-simple_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+messagef_mpi : messagef_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o messagef_mpi messagef_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+seedf-simple_mpi : seedf-simple_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o seedf-simple_mpi seedf-simple_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+seedf_mpi : seedf_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o seedf_mpi seedf_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+sprngf-simple_mpi : sprngf-simple_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o sprngf-simple_mpi sprngf-simple_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+sprngf_mpi : sprngf_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o sprngf_mpi sprngf_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ -+2streamsf_mpi : 2streamsf_mpi.F -+ $(F77) $(FFLAGS) $(INCLUDE) -o 2streamsf_mpi 2streamsf_mpi.F -L$(LIBDIR) -lsprng $(C++LIBS) -+ ---- sprng5/EXAMPLES/mpisprng/Makefile_2 1970-01-01 01:00:00.000000000 +0100 -+++ sprng5_ok/EXAMPLES/mpisprng/Makefile_2 2020-10-21 16:52:27.241717000 +0200 -@@ -0,0 +1,65 @@ -+##### -+ -+SHELL = /bin/sh -+MPIDEF = -DSPRNG_MPI -+ -+CC = mpicc -+CLD = $(CC) -+F77 = mpif90 -+F77LD = $(F77) -+FFXN = -DAdd_ -+FSUFFIX = F -+CXX = mpicxx -+CXXLD = $(CXX) -+ -+DEFS = -DHAVE_CONFIG_H -DLONG64=long -+CFLAGS = -O3 -DLittleEndian $(MPIDEF) -+CLDFLAGS = -O3 -+ -+FFLAGS = -O2 $(MPIDEF) -DPOINTER_SIZE=8 -DLONG64=long -DINTEGER_STAR_8 -+F77LDFLAGS = -O3 -+ -+CPP = cpp -P -DPOINTER_SIZE=8 -+ -+LIBDIR = $(EBROOTSPRNG)/lib -+INCDIR = $(EBROOTSPRNG)/include -+ -+########################################################################## -+ -+MPIEX = 2streams_mpi fsprng-simple_mpi fsprng_mpi message-simple_mpi \ -+ message_mpi pi-simple_mpi seed-simple_mpi seed_mpi \ -+ sprng-simple_mpi sprng_mpi -+ -+all : $(MPIEX) -+ -+2streams_mpi : 2streams_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o 2streams_mpi 2streams_mpi.cpp -L$(LIBDIR) -lsprng -+ -+fsprng-simple_mpi : fsprng-simple_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o fsprng-simple_mpi fsprng-simple_mpi.cpp -L$(LIBDIR) -lsprng -+ -+fsprng_mpi : fsprng_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o fsprng_mpi fsprng_mpi.cpp -L$(LIBDIR) -lsprng -+ -+message-simple_mpi : message-simple_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o message-simple_mpi message-simple_mpi.cpp -L$(LIBDIR) -lsprng -+ -+message_mpi : message_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o message_mpi message_mpi.cpp -L$(LIBDIR) -lsprng -+ -+pi-simple_mpi : pi-simple_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o pi-simple_mpi pi-simple_mpi.cpp -L$(LIBDIR) -lsprng -+ -+seed-simple_mpi : seed-simple_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o seed-simple_mpi seed-simple_mpi.cpp -L$(LIBDIR) -lsprng -+ -+seed_mpi : seed_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o seed_mpi seed_mpi.cpp -L$(LIBDIR) -lsprng -+ -+sprng-simple_mpi : sprng-simple_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o sprng-simple_mpi sprng-simple_mpi.cpp -L$(LIBDIR) -lsprng -+ -+sprng_mpi : sprng_mpi.cpp -+ $(CXX) $(CXXFLAGS) -I$(INCDIR) -o 
sprng_mpi sprng_mpi.cpp -L$(LIBDIR) -lsprng -+ -+ diff --git a/Golden_Repo/s/sprng/sprng-5-gompi-2020-14042019.eb b/Golden_Repo/s/sprng/sprng-5-gompi-2020-14042019.eb deleted file mode 100644 index fcc224757d570a748ec7ff4beddaa84237fb0f3f..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-5-gompi-2020-14042019.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# This work implements sprng5 -# http://www.sprng.org/ -## - -easyblock = 'ConfigureMake' -name = 'sprng' -version = '5' -versionsuffix = '-14042019' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 5 has been installed as module in - -$EBROOTSPRNG - -This version contains all different random number generators in one library, -the generator has to be chosen via an input parameter. -""" - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['%(name)s%(version)s%(versionsuffix)s.tar.bz2'] - -patches = [ - 'sprng-5-examples.patch', -] - -configopts = '--with-mpi --with-fortran' - -parallel = 1 - -postinstallcmds = [ - "cp -r include %(installdir)s/", - "cp -r EXAMPLES %(installdir)s", - "cp DOCS/README %(installdir)s", - "cp AUTHORS %(installdir)s", - "cp COPYING %(installdir)s", - "cp LICENSE %(installdir)s", - "mv %(installdir)s/EXAMPLES/Makefile_cxx %(installdir)s/EXAMPLES/Makefile", - "rm %(installdir)s/EXAMPLES/Makefile*.*", - "rm %(installdir)s/EXAMPLES/*.sprng", - "mv %(installdir)s/EXAMPLES/F77/Makefile_1 %(installdir)s/EXAMPLES/F77/Makefile", - "rm %(installdir)s/EXAMPLES/F77/Makefile*.*", - "rm %(installdir)s/EXAMPLES/F77/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/Makefile_2 %(installdir)s/EXAMPLES/mpisprng/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/Makefile*.*", - "rm %(installdir)s/EXAMPLES/mpisprng/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/F77/Makefile_1 %(installdir)s/EXAMPLES/mpisprng/F77/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/F77/Makefile*.*", -] - -modextravars = { - 'SPRNG5_ROOT': '%(installdir)s', - 'SPRNG5ROOT': '%(installdir)s', - 'SPRNG5_LIB': '%(installdir)s/lib', - 'SPRNG5_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-5-gpsmpi-2020-14042019.eb b/Golden_Repo/s/sprng/sprng-5-gpsmpi-2020-14042019.eb deleted file mode 100644 index 8d1cc0792c07e2f87b7efba00ff3f3d9938d659c..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-5-gpsmpi-2020-14042019.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# This work implements sprng5 -# http://www.sprng.org/ -## - -easyblock = 'ConfigureMake' -name = 'sprng' -version = '5' -versionsuffix = '-14042019' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 5 has been installed as module in - -$EBROOTSPRNG - -This version contains all different random number generators in one library, -the generator has to be chosen via an input parameter. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['%(name)s%(version)s%(versionsuffix)s.tar.bz2'] - -patches = [ - 'sprng-5-examples.patch', -] - -configopts = '--with-mpi --with-fortran' - -parallel = 1 - -postinstallcmds = [ - "cp -r include %(installdir)s/", - "cp -r EXAMPLES %(installdir)s", - "cp DOCS/README %(installdir)s", - "cp AUTHORS %(installdir)s", - "cp COPYING %(installdir)s", - "cp LICENSE %(installdir)s", - "mv %(installdir)s/EXAMPLES/Makefile_cxx %(installdir)s/EXAMPLES/Makefile", - "rm %(installdir)s/EXAMPLES/Makefile*.*", - "rm %(installdir)s/EXAMPLES/*.sprng", - "mv %(installdir)s/EXAMPLES/F77/Makefile_1 %(installdir)s/EXAMPLES/F77/Makefile", - "rm %(installdir)s/EXAMPLES/F77/Makefile*.*", - "rm %(installdir)s/EXAMPLES/F77/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/Makefile_2 %(installdir)s/EXAMPLES/mpisprng/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/Makefile*.*", - "rm %(installdir)s/EXAMPLES/mpisprng/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/F77/Makefile_1 %(installdir)s/EXAMPLES/mpisprng/F77/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/F77/Makefile*.*", -] - -modextravars = { - 'SPRNG5_ROOT': '%(installdir)s', - 'SPRNG5ROOT': '%(installdir)s', - 'SPRNG5_LIB': '%(installdir)s/lib', - 'SPRNG5_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-5-iimpi-2020-14042019.eb b/Golden_Repo/s/sprng/sprng-5-iimpi-2020-14042019.eb deleted file mode 100644 index d8e636081b755bc8638e1062967425fb8c931c72..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-5-iimpi-2020-14042019.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# This work implements sprng5 -# http://www.sprng.org/ -## - -easyblock = 'ConfigureMake' -name = 'sprng' -version = '5' -versionsuffix = '-14042019' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 5 has been installed as module in - -$EBROOTSPRNG - -This version contains all different random number generators in one library, -the generator has to be chosen via an input parameter. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['%(name)s%(version)s%(versionsuffix)s.tar.bz2'] - -patches = [ - 'sprng-5-examples.patch', -] - -configopts = '--with-mpi --with-fortran' - -parallel = 1 - -postinstallcmds = [ - "cp -r include %(installdir)s/", - "cp -r EXAMPLES %(installdir)s", - "cp DOCS/README %(installdir)s", - "cp AUTHORS %(installdir)s", - "cp COPYING %(installdir)s", - "cp LICENSE %(installdir)s", - "mv %(installdir)s/EXAMPLES/Makefile_cxx %(installdir)s/EXAMPLES/Makefile", - "rm %(installdir)s/EXAMPLES/Makefile*.*", - "rm %(installdir)s/EXAMPLES/*.sprng", - "mv %(installdir)s/EXAMPLES/F77/Makefile_1 %(installdir)s/EXAMPLES/F77/Makefile", - "rm %(installdir)s/EXAMPLES/F77/Makefile*.*", - "rm %(installdir)s/EXAMPLES/F77/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/Makefile_2 %(installdir)s/EXAMPLES/mpisprng/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/Makefile*.*", - "rm %(installdir)s/EXAMPLES/mpisprng/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/F77/Makefile_1 %(installdir)s/EXAMPLES/mpisprng/F77/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/F77/Makefile*.*", -] - -modextravars = { - 'SPRNG5_ROOT': '%(installdir)s', - 'SPRNG5ROOT': '%(installdir)s', - 'SPRNG5_LIB': '%(installdir)s/lib', - 'SPRNG5_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-5-iompi-2020-14042019.eb b/Golden_Repo/s/sprng/sprng-5-iompi-2020-14042019.eb deleted file mode 100644 index f933552ffafaed7653eca553a4a6d16eb49e6b10..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-5-iompi-2020-14042019.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# This work implements sprng5 -# http://www.sprng.org/ -## - -easyblock = 'ConfigureMake' -name = 'sprng' -version = '5' -versionsuffix = '-14042019' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 5 has been installed as module in - -$EBROOTSPRNG - -This version contains all different random number generators in one library, -the generator has to be chosen via an input parameter. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['%(name)s%(version)s%(versionsuffix)s.tar.bz2'] - -patches = [ - 'sprng-5-examples.patch', -] - -configopts = '--with-mpi --with-fortran' - -parallel = 1 - -postinstallcmds = [ - "cp -r include %(installdir)s/", - "cp -r EXAMPLES %(installdir)s", - "cp DOCS/README %(installdir)s", - "cp AUTHORS %(installdir)s", - "cp COPYING %(installdir)s", - "cp LICENSE %(installdir)s", - "mv %(installdir)s/EXAMPLES/Makefile_cxx %(installdir)s/EXAMPLES/Makefile", - "rm %(installdir)s/EXAMPLES/Makefile*.*", - "rm %(installdir)s/EXAMPLES/*.sprng", - "mv %(installdir)s/EXAMPLES/F77/Makefile_1 %(installdir)s/EXAMPLES/F77/Makefile", - "rm %(installdir)s/EXAMPLES/F77/Makefile*.*", - "rm %(installdir)s/EXAMPLES/F77/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/Makefile_2 %(installdir)s/EXAMPLES/mpisprng/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/Makefile*.*", - "rm %(installdir)s/EXAMPLES/mpisprng/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/F77/Makefile_1 %(installdir)s/EXAMPLES/mpisprng/F77/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/F77/Makefile*.*", -] - -modextravars = { - 'SPRNG5_ROOT': '%(installdir)s', - 'SPRNG5ROOT': '%(installdir)s', - 'SPRNG5_LIB': '%(installdir)s/lib', - 'SPRNG5_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng-5-ipsmpi-2020-14042019.eb b/Golden_Repo/s/sprng/sprng-5-ipsmpi-2020-14042019.eb deleted file mode 100644 index 42e89d54b13cb97abeaf797afa433cd6bfa72b0c..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng-5-ipsmpi-2020-14042019.eb +++ /dev/null @@ -1,67 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# This work implements sprng5 -# http://www.sprng.org/ -## - -easyblock = 'ConfigureMake' -name = 'sprng' -version = '5' -versionsuffix = '-14042019' - -homepage = 'http://www.sprng.org/' -description = """The Scalable Parallel Random Number Generators Library (SPRNG) version 5 has been installed as module in - -$EBROOTSPRNG - -This version contains all different random number generators in one library, -the generator has to be chosen via an input parameter. -""" - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True} - -source_urls = ['http://www.sprng.org/'] -sources = ['%(name)s%(version)s%(versionsuffix)s.tar.bz2'] - -patches = [ - 'sprng-5-examples.patch', -] - -configopts = '--with-mpi --with-fortran' - -parallel = 1 - -postinstallcmds = [ - "cp -r include %(installdir)s/", - "cp -r EXAMPLES %(installdir)s", - "cp DOCS/README %(installdir)s", - "cp AUTHORS %(installdir)s", - "cp COPYING %(installdir)s", - "cp LICENSE %(installdir)s", - "mv %(installdir)s/EXAMPLES/Makefile_cxx %(installdir)s/EXAMPLES/Makefile", - "rm %(installdir)s/EXAMPLES/Makefile*.*", - "rm %(installdir)s/EXAMPLES/*.sprng", - "mv %(installdir)s/EXAMPLES/F77/Makefile_1 %(installdir)s/EXAMPLES/F77/Makefile", - "rm %(installdir)s/EXAMPLES/F77/Makefile*.*", - "rm %(installdir)s/EXAMPLES/F77/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/Makefile_2 %(installdir)s/EXAMPLES/mpisprng/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/Makefile*.*", - "rm %(installdir)s/EXAMPLES/mpisprng/*.sprng", - "mv %(installdir)s/EXAMPLES/mpisprng/F77/Makefile_1 %(installdir)s/EXAMPLES/mpisprng/F77/Makefile", - "rm %(installdir)s/EXAMPLES/mpisprng/F77/Makefile*.*", -] - -modextravars = { - 'SPRNG5_ROOT': '%(installdir)s', - 'SPRNG5ROOT': '%(installdir)s', - 'SPRNG5_LIB': '%(installdir)s/lib', - 'SPRNG5_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sprng/sprng1.patch b/Golden_Repo/s/sprng/sprng1.patch deleted file mode 100644 index 223d18cdced41e6688600e08e5dab5f13895841a..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng1.patch +++ /dev/null @@ -1,111 +0,0 @@ ---- sprng/make.CHOICES 1998-05-28 16:55:52.000000000 +0200 -+++ sprng1_ok/make.CHOICES 2015-08-06 09:33:55.362818441 +0200 -@@ -24,12 +24,19 @@ - #PLAT = HP - #PLAT = LINUX - #PLAT = O2K --PLAT = SGI -+#PLAT = SGI - #PLAT = SOLARIS - #PLAT = SP2 # IBM SP2 - #PLAT = SUN - # For T3D, Use PLAT=T3E instead - #PLAT = T3E --#PLAT = GENERIC -+PLAT = GENERIC - - LIB_REL_DIR = lib -+ -+PMLCGDEF = -DUSE_PMLCG -+#GMP_ROOT = /usr/local/software/jureca/Stage3/software/Toolchain/intel-para/2015.07/GMP/6.0.0a -+GMP_ROOT = $(EBROOTGMP) -+GMPLIB = -Wl,-Bstatic -Wl,--start-group -L$(GMP_ROOT)/lib -lgmp -Wl,--end-group -Wl,-Bdynamic -+ -+MPIDEF = -DSPRNG_MPI ---- sprng/SRC/make.GENERIC 1998-07-03 20:33:16.000000000 +0200 -+++ sprng1_ok/SRC/make.GENERIC 2014-11-25 12:58:53.000000000 +0100 -@@ -1,36 +1,35 @@ - AR = ar --ARFLAGS = cr --#If your system has ranlib, then replace next statement with the one after it. --RANLIB = echo --#RANLIB = ranlib --CC = gcc -+ARFLAGS = -cr -+#If your system does not have ranlib, then replace next statement with -+#RANLIB = echo -+RANLIB = ranlib -+CC = mpicc - CLD = $(CC) --F77 = f77 -+# Set f77 to echo if you do not have a FORTRAN compiler -+F77 = mpif77 -+#F77 = echo - F77LD = $(F77) --FFXN = -DAdd_ -+FFXN = -DAdd_ - FSUFFIX = F - --MPIF77 = $(F77) --MPICC = $(CC) -+MPIF77 = mpif77 -+MPICC = mpicc - - # To use MPI, set the MPIDIR to location of mpi library, and MPILIB - # to name of mpi library. Remove # signs from beginning of next 3 lines. - # Also, if the previous compilation was without MPI, type: make realclean - # before compiling for mpi. - # --#MPIDEF = -DSPRNG_MPI #Only if you plan to use MPI --MPIDIR = --MPILIB = -- --# If _LONG_LONG type is available, then you can use the addition flag --# -D_LONG_LONG. Set F77 to echo to compile the C version alone. 
--# Try adding: -DGENERIC to CFLAGS. This can improve speed, but may give --# incorrect values. Check with 'checksprng' to see if it works. -- --CFLAGS = -O $(MPIDEF) --CLDFLAGS = -O --FFLAGS = -O $(MPIDEF) --F77LDFLAGS = -O -- --CPP = f77 -F -+# COMMENTED BY ME -+#MPIDIR = -L/usr/local/mpi/build/LINUX/ch_p4/lib -+#MPILIB = -lmpich -+ -+# Please include mpi header file path, if needed -+ -+CFLAGS = -O3 -DLittleEndian $(PMLCGDEF) $(MPIDEF) -D$(PLAT) -+CLDFLAGS = -O3 -+#FFLAGS = -O3 $(PMLCGDEF) $(MPIDEF) -D$(PLAT) -I/usr/local/mpi/include -I/usr/local/mpi/build/LINUX/ch_p4/include -I. -+FFLAGS = -O2 $(PMLCGDEF) $(MPIDEF) -D$(PLAT) -DPOINTER_SIZE=8 -+F77LDFLAGS = -O3 - -+CPP = cpp -P -DPOINTER_SIZE=8 ---- sprng/Makefile 1998-03-23 21:45:33.000000000 +0100 -+++ sprng1_ok/Makefile 2014-11-28 16:08:51.000000000 +0100 -@@ -24,17 +24,18 @@ - - include $(SRCDIR)/make.$(PLAT) - --all : src examples tests -+#all : src examples tests -+all : src tests - - #--------------------------------------------------------------------------- - src : -- (cd SRC; $(MAKE) LIBDIR=../$(LIBDIR) SRCDIR=../$(SRCDIR) PLAT=$(PLAT); cd ..) -+ (cd SRC; $(MAKE) PLAT=$(PLAT); cd ..) - - examples : -- (cd EXAMPLES; $(MAKE) LIBDIR=../$(LIBDIR) SRCDIR=../$(SRCDIR) PLAT=$(PLAT)) -+ (cd EXAMPLES; $(MAKE) all PLAT=$(PLAT)) - - tests : -- (cd TESTS; $(MAKE) LIBDIR=../$(LIBDIR) SRCDIR=../$(SRCDIR) PLAT=$(PLAT)) -+ (cd TESTS; $(MAKE) PLAT=$(PLAT) BASE=foo LIB=bar) - - #--------------------------------------------------------------------------- - clean : diff --git a/Golden_Repo/s/sprng/sprng1_2.patch b/Golden_Repo/s/sprng/sprng1_2.patch deleted file mode 100644 index bed49a0fe91195323bb543b8b71c8655d6a7e3af..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sprng/sprng1_2.patch +++ /dev/null @@ -1,283 +0,0 @@ -diff -ruN sprng/EXAMPLES/Makefile sprng1_ok/EXAMPLES/Makefile ---- sprng/EXAMPLES/Makefile 1998-05-22 16:56:59.000000000 +0200 -+++ sprng1_ok/EXAMPLES/Makefile 2015-08-06 11:23:55.374522148 +0200 -@@ -25,12 +25,18 @@ - ############################################################################ - - SHELL = /bin/sh -+PLAT = GENERIC -+MPIDEF = -DSPRNG_MPI -+MPIDIR = -+MPILIB = -+ -+PMLCGDEF = -DUSE_PMLCG -+GMP_ROOT = $(EBROOTGMP) -+GMPLIB = -Wl,-Bstatic -Wl,--start-group -L$(GMP_ROOT)/lib -lgmp -Wl,--end-group -Wl,-Bdynamic - --include ../make.CHOICES - --LIBDIR = ../$(LIB_REL_DIR) --SRCDIR = ../SRC --INCDIR = ../include -+LIBDIR = $(EBROOTSPRNG)/lib -+INCDIR = $(EBROOTSPRNG)/include - - # use 'lfg' to get Lagged Fibonacci, 'lcg' to get Linear Congruential, etc. 
- SPRNGLIB=lcg -@@ -47,7 +53,23 @@ - sprngf-simple_mpi fsprngf-simple_mpi seedf-simple_mpi \ - messagef-simple_mpi pi-simple_mpi - --include $(SRCDIR)/make.$(PLAT) -+CC = mpicc -+CLD = $(CC) -+F77 = mpif77 -+F77LD = $(F77) -+FFXN = -DAdd_ -+FSUFFIX = F -+ -+MPIF77 = mpif77 -+MPICC = mpicc -+ -+CFLAGS = -O3 -DLittleEndian $(PMLCGDEF) $(MPIDEF) -D$(PLAT) -+CLDFLAGS = -O3 -+ -+FFLAGS = -O2 $(PMLCGDEF) $(MPIDEF) -D$(PLAT) -DPOINTER_SIZE=8 -+F77LDFLAGS = -O3 -+ -+CPP = cpp -P -DPOINTER_SIZE=8 - - serial : $(EX) - -@@ -55,123 +77,123 @@ - - mpi : $(MPIEX) - --simple-simple : simple-simple.c $(LIBDIR)/lib$(SPRNGLIB).a -+simple-simple : simple-simple.c - $(CC) $(CFLAGS) -I$(INCDIR) -o simple-simple simple-simple.c -L$(LIBDIR) -l$(SPRNGLIB) - --sprng : sprng.c $(LIBDIR)/lib$(SPRNGLIB).a -+sprng : sprng.c - $(CC) $(CFLAGS) $(CHK) -I$(INCDIR) -o sprng sprng.c -L$(LIBDIR) -l$(SPRNGLIB) - --sprng-simple : sprng-simple.c $(LIBDIR)/lib$(SPRNGLIB).a -+sprng-simple : sprng-simple.c - $(CC) $(CFLAGS) -I$(INCDIR) -o sprng-simple sprng-simple.c -L$(LIBDIR) -l$(SPRNGLIB) - --sprng-simple_mpi : sprng-simple_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+sprng-simple_mpi : sprng-simple_mpi.c - $(MPICC) $(CFLAGS) -I$(INCDIR) -o sprng-simple_mpi sprng-simple_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --sprng_mpi : sprng_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+sprng_mpi : sprng_mpi.c - $(MPICC) $(CFLAGS) $(CHK) -I$(INCDIR) -o sprng_mpi sprng_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --fsprng_mpi : fsprng_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+fsprng_mpi : fsprng_mpi.c - $(MPICC) $(CFLAGS) $(CHK) -I$(INCDIR) -o fsprng_mpi fsprng_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --fsprng-simple_mpi : fsprng-simple_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+fsprng-simple_mpi : fsprng-simple_mpi.c - $(MPICC) $(CFLAGS) -I$(INCDIR) -o fsprng-simple_mpi fsprng-simple_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --2streams_mpi : 2streams_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+2streams_mpi : 2streams_mpi.c - $(MPICC) $(CFLAGS) $(CHK) -I$(INCDIR) -o 2streams_mpi 2streams_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --seed : seed.c $(LIBDIR)/lib$(SPRNGLIB).a -+seed : seed.c - $(CC) $(CFLAGS) $(CHK) -I$(INCDIR) -o seed seed.c -L$(LIBDIR) -l$(SPRNGLIB) - --seed-simple : seed-simple.c $(LIBDIR)/lib$(SPRNGLIB).a -+seed-simple : seed-simple.c - $(CC) $(CFLAGS) -I$(INCDIR) -o seed-simple seed-simple.c -L$(LIBDIR) -l$(SPRNGLIB) - --seed_mpi : seed_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+seed_mpi : seed_mpi.c - $(MPICC) $(CFLAGS) $(CHK) -I$(INCDIR) -o seed_mpi seed_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --seed-simple_mpi : seed-simple_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+seed-simple_mpi : seed-simple_mpi.c - $(MPICC) $(CFLAGS) -I$(INCDIR) -o seed-simple_mpi seed-simple_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --checkpoint : checkpoint.c $(LIBDIR)/lib$(SPRNGLIB).a -+checkpoint : checkpoint.c - $(CC) $(CFLAGS) $(CHK) -I$(INCDIR) -o checkpoint checkpoint.c -L$(LIBDIR) -l$(SPRNGLIB) - --checkpoint-simple : checkpoint-simple.c $(LIBDIR)/lib$(SPRNGLIB).a -+checkpoint-simple : checkpoint-simple.c - $(CC) $(CFLAGS) -I$(INCDIR) -o checkpoint-simple checkpoint-simple.c -L$(LIBDIR) -l$(SPRNGLIB) - --message_mpi : message_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+message_mpi : message_mpi.c - $(MPICC) $(CFLAGS) $(CHK) -I$(INCDIR) -o message_mpi message_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --message-simple_mpi : message-simple_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+message-simple_mpi : message-simple_mpi.c - 
$(MPICC) $(CFLAGS) -I$(INCDIR) -o message-simple_mpi message-simple_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --pi-simple : pi-simple.c $(LIBDIR)/lib$(SPRNGLIB).a -+pi-simple : pi-simple.c - $(CC) $(CFLAGS) $(CHK) -I$(INCDIR) -o pi-simple pi-simple.c -L$(LIBDIR) -l$(SPRNGLIB) -lm - --pi-simple_mpi : pi-simple_mpi.c $(LIBDIR)/lib$(SPRNGLIB).a -+pi-simple_mpi : pi-simple_mpi.c - $(MPICC) $(CFLAGS) -I$(INCDIR) -o pi-simple_mpi pi-simple_mpi.c -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) -lm - --spawn : spawn.c $(LIBDIR)/lib$(SPRNGLIB).a -+spawn : spawn.c - $(CC) $(CFLAGS) -DCHECK_POINTERS -I$(INCDIR) -o spawn spawn.c -L$(LIBDIR) -l$(SPRNGLIB) - --invalid_ID : invalid_ID.c $(LIBDIR)/lib$(SPRNGLIB).a -+invalid_ID : invalid_ID.c - $(CC) $(CFLAGS) -DCHECK_POINTERS -I$(INCDIR) -o invalid_ID invalid_ID.c -L$(LIBDIR) -l$(SPRNGLIB) - --sprngf : sprngf.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+sprngf : sprngf.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o sprngf sprngf.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) - --sprngf_mpi : sprngf_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+sprngf_mpi : sprngf_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o sprngf_mpi sprngf_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --fsprngf_mpi : fsprngf_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+fsprngf_mpi : fsprngf_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o fsprngf_mpi fsprngf_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --seedf : seedf.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+seedf : seedf.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o seedf seedf.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) - --seedf_mpi : seedf_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+seedf_mpi : seedf_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o seedf_mpi seedf_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --checkpointf : checkpointf.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+checkpointf : checkpointf.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o checkpointf checkpointf.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(CMDDIR) $(CMDLIB) - --messagef_mpi : messagef_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+messagef_mpi : messagef_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o messagef_mpi messagef_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - - --2streamsf_mpi : 2streamsf_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+2streamsf_mpi : 2streamsf_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o 2streamsf_mpi 2streamsf_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --invalid_IDf : invalid_IDf.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+invalid_IDf : invalid_IDf.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o invalid_IDf invalid_IDf.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) - --spawnf : spawnf.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+spawnf : spawnf.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o spawnf spawnf.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) - - --simplef-simple : simplef-simple.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+simplef-simple : simplef-simple.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o simplef-simple simplef-simple.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) - --sprngf-simple : sprngf-simple.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+sprngf-simple : sprngf-simple.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o sprngf-simple sprngf-simple.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) - --sprngf-simple_mpi : sprngf-simple_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+sprngf-simple_mpi : sprngf-simple_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o sprngf-simple_mpi 
sprngf-simple_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --fsprngf-simple_mpi : fsprngf-simple_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+fsprngf-simple_mpi : fsprngf-simple_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o fsprngf-simple_mpi fsprngf-simple_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --seedf-simple : seedf-simple.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+seedf-simple : seedf-simple.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o seedf-simple seedf-simple.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) - --seedf-simple_mpi : seedf-simple_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+seedf-simple_mpi : seedf-simple_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o seedf-simple_mpi seedf-simple_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --checkpointf-simple : checkpointf-simple.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+checkpointf-simple : checkpointf-simple.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o checkpointf-simple checkpointf-simple.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(CMDDIR) $(CMDLIB) - --messagef-simple_mpi : messagef-simple_mpi.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+messagef-simple_mpi : messagef-simple_mpi.$(FSUFFIX) - $(MPIF77) $(FFLAGS) -I$(INCDIR) -o messagef-simple_mpi messagef-simple_mpi.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) $(MPIDIR) $(MPILIB) - --pif-simple : pif-simple.$(FSUFFIX) $(LIBDIR)/lib$(SPRNGLIB).a -+pif-simple : pif-simple.$(FSUFFIX) - $(F77) $(FFLAGS) -I$(INCDIR) -o pif-simple pif-simple.$(FSUFFIX) -L$(LIBDIR) -l$(SPRNGLIB) -lm - - -diff -ruN sprng/EXAMPLES/batchmpi sprng1_ok/EXAMPLES/batchmpi ---- sprng/EXAMPLES/batchmpi 1970-01-01 01:00:00.000000000 +0100 -+++ sprng1_ok/EXAMPLES/batchmpi 2015-08-06 10:07:07.940171443 +0200 -@@ -0,0 +1,25 @@ -+#!/bin/bash -x -+#SBATCH --nodes=1 -+#SBATCH --ntasks=4 -+#SBATCH --time=00:10:00 -+#SBATCH --partition=batch -+srun -n 4 ./2streams_mpi >> 2streams_mpi_out -+srun -n 4 ./2streamsf_mpi >> 2streamsf_mpi_out -+srun -n 4 ./fsprng_mpi >> fsprng_mpi_out -+srun -n 4 ./fsprng-simple_mpi >> fsprng-simple_mpi_out -+srun -n 4 ./fsprngf_mpi >> fsprngf_mpi_out -+srun -n 4 ./fsprngf-simple_mpi >> fsprngf-simple_mpi_out -+srun -n 4 ./message_mpi >> message_mpi_out -+srun -n 4 ./message-simple_mpi >> message-simple_mpi_out0 -+srun -n 4 ./messagef_mpi >> messagef_mpi_out -+srun -n 4 ./messagef_mpi >> messagef_mpi_out -+srun -n 4 ./messagef-simple_mpi >> messagef-simple_mpi_out -+srun -n 4 ./seed_mpi >> seed_mpi_out -+srun -n 4 ./seed-simple_mpi >> seed-simple_mpi_out -+srun -n 4 ./seedf_mpi >> seedf_mpi_out -+srun -n 4 ./seedf-simple_mpi >> seedf-simple_mpi_out -+srun -n 4 ./sprng_mpi >> sprng_mpi_out -+srun -n 4 ./sprng-simple_mpi >> sprng-simple_mpi_out -+srun -n 4 ./sprngf_mpi >> sprngf_mpi_out -+srun -n 4 ./sprngf-simple_mpi >> sprngf-simple_mpi_out -+srun -n 4 ./pi-simple_mpi < pi.in >> pi-simple_mpi_out -diff -ruN sprng/EXAMPLES/batchseq sprng1_ok/EXAMPLES/batchseq ---- sprng/EXAMPLES/batchseq 1970-01-01 01:00:00.000000000 +0100 -+++ sprng1_ok/EXAMPLES/batchseq 2014-11-25 11:15:22.000000000 +0100 -@@ -0,0 +1,21 @@ -+./checkpoint < checkpoint.in >> checkpoint_out -+./checkpoint-simple < checkpoint.in >> checkpoint-simple_out -+./checkpointf < checkpoint.in >> checkpointf_out -+./checkpointf-simple < checkpoint.in >> checkpointf-simple_out -+./invalid_ID >> invalid_ID_out -+./invalid_IDf >> invalid_IDf_out -+./pi-simple < pi.in >> pi-simple_out -+./pif-simple < pi.in >> pif-simple_out -+./seed >> seed_out0 -+./seed-simple >> seed-simple_out -+./seedf >> seedf_out 
-+./seed-simple >> seed-simple_out -+./simple-simple >> simple-simple_out -+./simplef-simple >> simplef-simple_out -+./spawn >> spawn_out -+./spawnf >> spawnf_out -+./sprng >> sprng_out -+./sprng-simple >> sprng-simple_out -+./sprngf >> sprngf_out -+./sprngf-simple >> sprngf-simple_out -+ -diff -ruN sprng/EXAMPLES/checkpoint.in sprng1_ok/EXAMPLES/checkpoint.in ---- sprng/EXAMPLES/checkpoint.in 1970-01-01 01:00:00.000000000 +0100 -+++ sprng1_ok/EXAMPLES/checkpoint.in 2014-11-25 11:15:22.000000000 +0100 -@@ -0,0 +1,3 @@ -+outstr0 -+9 -+ -diff -ruN sprng/EXAMPLES/pi.in sprng1_ok/EXAMPLES/pi.in ---- sprng/EXAMPLES/pi.in 1970-01-01 01:00:00.000000000 +0100 -+++ sprng1_ok/EXAMPLES/pi.in 2014-11-25 11:15:22.000000000 +0100 -@@ -0,0 +1,4 @@ -+9 -+pi_store_0 -+10 -+ diff --git a/Golden_Repo/s/sundials/sundials-5.4.0-gomkl-2020.eb b/Golden_Repo/s/sundials/sundials-5.4.0-gomkl-2020.eb deleted file mode 100644 index c9729937994e453007a9062768eee43bc4152f80..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sundials/sundials-5.4.0-gomkl-2020.eb +++ /dev/null @@ -1,70 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# https://computation.llnl.gov/casc/sundials/main.html -# -## - -easyblock = 'CMakeMake' -name = 'sundials' -version = '5.4.0' - -homepage = 'https://computation.llnl.gov/casc/sundials/main.html' -description = """SUNDIALS is a SUite of Nonlinear and DIfferential/ALgebraic equation Solvers. It consists of the -following six solvers: CVODE, solves initial value problems for ordinary differential equation (ODE) systems; CVODES, -solves ODE systems and includes sensitivity analysis capabilities (forward and adjoint); ARKODE, solves initial value -ODE problems with additive Runge-Kutta methods, include support for IMEX methods; IDA, solves initial value problems for -differential-algebraic equation (DAE) systems; IDAS, solves DAE systems and includes sensitivity analysis capabilities -(forward and adjoint); KINSOL, solves nonlinear algebraic systems. -""" - -examples = 'Examples can be found in $EBROOTSUNDIALS/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://computation.llnl.gov/projects/sundials/download/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -separate_build_dir = 'True' - -dependencies = [ - ('Hypre', '2.20.0'), -] - -configopts = [ - '-DBUILD_SHARED_LIBS=ON ' + - '-DMPI_ENABLE=ON ' + - '-DF77_INTERFACE_ENABLE=ON ' + - '-DOPENMP_ENABLE=ON ' + - '-DEXAMPLES_ENABLE_CXX=ON ' + - '-DSUNDIALS_BUILD_PACKAGE_FUSED_KERNELS=ON ' + - '-DLAPACK_ENABLE=ON -DLAPACK_LIBRARIES="$LIBLAPACK" ' + - '-DHYPRE_ENABLE=ON -DHYPRE_INCLUDE_DIR="$EBROOTHYPRE/include" -DHYPRE_LIBRARY_DIR="$EBROOTHYPRE/lib" ' -] - -postinstallcmds = [ - "cp -r examples %(installdir)s/examples", - "ln -s %(installdir)s/lib64 %(installdir)s/lib", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ["examples", "include", "lib64"], -} - -modextravars = { - 'SUNDIALS_ROOT': '%(installdir)s', - 'SUNDIALS_LIB': '%(installdir)s/lib64', - 'SUNDIALS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sundials/sundials-5.4.0-gpsmkl-2020.eb b/Golden_Repo/s/sundials/sundials-5.4.0-gpsmkl-2020.eb deleted file mode 100644 index 27e65d2ba5172077fd17623c350ac7573cb09614..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sundials/sundials-5.4.0-gpsmkl-2020.eb +++ /dev/null @@ -1,70 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# https://computation.llnl.gov/casc/sundials/main.html -# -## - -easyblock = 'CMakeMake' -name = 'sundials' -version = '5.4.0' - -homepage = 'https://computation.llnl.gov/casc/sundials/main.html' -description = """SUNDIALS is a SUite of Nonlinear and DIfferential/ALgebraic equation Solvers. It consists of the -following six solvers: CVODE, solves initial value problems for ordinary differential equation (ODE) systems; CVODES, -solves ODE systems and includes sensitivity analysis capabilities (forward and adjoint); ARKODE, solves initial value -ODE problems with additive Runge-Kutta methods, include support for IMEX methods; IDA, solves initial value problems for -differential-algebraic equation (DAE) systems; IDAS, solves DAE systems and includes sensitivity analysis capabilities -(forward and adjoint); KINSOL, solves nonlinear algebraic systems. -""" - -examples = 'Examples can be found in $EBROOTSUNDIALS/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://computation.llnl.gov/projects/sundials/download/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -separate_build_dir = 'True' - -dependencies = [ - ('Hypre', '2.20.0'), -] - -configopts = [ - '-DBUILD_SHARED_LIBS=ON ' + - '-DMPI_ENABLE=ON ' + - '-DF77_INTERFACE_ENABLE=ON ' + - '-DOPENMP_ENABLE=ON ' + - '-DEXAMPLES_ENABLE_CXX=ON ' + - '-DSUNDIALS_BUILD_PACKAGE_FUSED_KERNELS=ON ' + - '-DLAPACK_ENABLE=ON -DLAPACK_LIBRARIES="$LIBLAPACK" ' + - '-DHYPRE_ENABLE=ON -DHYPRE_INCLUDE_DIR="$EBROOTHYPRE/include" -DHYPRE_LIBRARY_DIR="$EBROOTHYPRE/lib" ' -] - -postinstallcmds = [ - "cp -r examples %(installdir)s/examples", - "ln -s %(installdir)s/lib64 %(installdir)s/lib", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ["examples", "include", "lib64"], -} - -modextravars = { - 'SUNDIALS_ROOT': '%(installdir)s', - 'SUNDIALS_LIB': '%(installdir)s/lib64', - 'SUNDIALS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sundials/sundials-5.4.0-intel-2020.eb b/Golden_Repo/s/sundials/sundials-5.4.0-intel-2020.eb deleted file mode 100644 index 4667792b11749173e4c1f72c0266f9ca03dc245b..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sundials/sundials-5.4.0-intel-2020.eb +++ /dev/null @@ -1,70 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# https://computation.llnl.gov/casc/sundials/main.html -# -## - -easyblock = 'CMakeMake' -name = 'sundials' -version = '5.4.0' - -homepage = 'https://computation.llnl.gov/casc/sundials/main.html' -description = """SUNDIALS is a SUite of Nonlinear and DIfferential/ALgebraic equation Solvers. It consists of the -following six solvers: CVODE, solves initial value problems for ordinary differential equation (ODE) systems; CVODES, -solves ODE systems and includes sensitivity analysis capabilities (forward and adjoint); ARKODE, solves initial value -ODE problems with additive Runge-Kutta methods, include support for IMEX methods; IDA, solves initial value problems for -differential-algebraic equation (DAE) systems; IDAS, solves DAE systems and includes sensitivity analysis capabilities -(forward and adjoint); KINSOL, solves nonlinear algebraic systems. -""" - -examples = 'Examples can be found in $EBROOTSUNDIALS/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://computation.llnl.gov/projects/sundials/download/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -separate_build_dir = 'True' - -dependencies = [ - ('Hypre', '2.20.0'), -] - -configopts = [ - '-DBUILD_SHARED_LIBS=ON ' + - '-DMPI_ENABLE=ON ' + - '-DF77_INTERFACE_ENABLE=ON ' + - '-DOPENMP_ENABLE=ON ' + - '-DEXAMPLES_ENABLE_CXX=ON ' + - '-DSUNDIALS_BUILD_PACKAGE_FUSED_KERNELS=ON ' + - '-DLAPACK_ENABLE=ON -DLAPACK_LIBRARIES="$LIBLAPACK" ' + - '-DHYPRE_ENABLE=ON -DHYPRE_INCLUDE_DIR="$EBROOTHYPRE/include" -DHYPRE_LIBRARY_DIR="$EBROOTHYPRE/lib" ' -] - -postinstallcmds = [ - "cp -r examples %(installdir)s/examples", - "ln -s %(installdir)s/lib64 %(installdir)s/lib", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ["examples", "include", "lib64"], -} - -modextravars = { - 'SUNDIALS_ROOT': '%(installdir)s', - 'SUNDIALS_LIB': '%(installdir)s/lib64', - 'SUNDIALS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sundials/sundials-5.4.0-intel-para-2020.eb b/Golden_Repo/s/sundials/sundials-5.4.0-intel-para-2020.eb deleted file mode 100644 index f79ddf375d4cad1ddfdad8a394d24e4cfd679c6d..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sundials/sundials-5.4.0-intel-para-2020.eb +++ /dev/null @@ -1,70 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# https://computation.llnl.gov/casc/sundials/main.html -# -## - -easyblock = 'CMakeMake' -name = 'sundials' -version = '5.4.0' - -homepage = 'https://computation.llnl.gov/casc/sundials/main.html' -description = """SUNDIALS is a SUite of Nonlinear and DIfferential/ALgebraic equation Solvers. It consists of the -following six solvers: CVODE, solves initial value problems for ordinary differential equation (ODE) systems; CVODES, -solves ODE systems and includes sensitivity analysis capabilities (forward and adjoint); ARKODE, solves initial value -ODE problems with additive Runge-Kutta methods, include support for IMEX methods; IDA, solves initial value problems for -differential-algebraic equation (DAE) systems; IDAS, solves DAE systems and includes sensitivity analysis capabilities -(forward and adjoint); KINSOL, solves nonlinear algebraic systems. -""" - -examples = 'Examples can be found in $EBROOTSUNDIALS/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://computation.llnl.gov/projects/sundials/download/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -separate_build_dir = 'True' - -dependencies = [ - ('Hypre', '2.20.0'), -] - -configopts = [ - '-DBUILD_SHARED_LIBS=ON ' + - '-DMPI_ENABLE=ON ' + - '-DF77_INTERFACE_ENABLE=ON ' + - '-DOPENMP_ENABLE=ON ' + - '-DEXAMPLES_ENABLE_CXX=ON ' + - '-DSUNDIALS_BUILD_PACKAGE_FUSED_KERNELS=ON ' + - '-DLAPACK_ENABLE=ON -DLAPACK_LIBRARIES="$LIBLAPACK" ' + - '-DHYPRE_ENABLE=ON -DHYPRE_INCLUDE_DIR="$EBROOTHYPRE/include" -DHYPRE_LIBRARY_DIR="$EBROOTHYPRE/lib" ' -] - -postinstallcmds = [ - "cp -r examples %(installdir)s/examples", - "ln -s %(installdir)s/lib64 %(installdir)s/lib", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ["examples", "include", "lib64"], -} - -modextravars = { - 'SUNDIALS_ROOT': '%(installdir)s', - 'SUNDIALS_LIB': '%(installdir)s/lib64', - 'SUNDIALS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/s/sundials/sundials-5.4.0-iomkl-2020.eb b/Golden_Repo/s/sundials/sundials-5.4.0-iomkl-2020.eb deleted file mode 100644 index 46444b9c3a0b290f941b467999780f0dd6d188a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/s/sundials/sundials-5.4.0-iomkl-2020.eb +++ /dev/null @@ -1,70 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Authors:: Inge Gutheil <i.gutheil@fz-juelich.de> -# -# https://computation.llnl.gov/casc/sundials/main.html -# -## - -easyblock = 'CMakeMake' -name = 'sundials' -version = '5.4.0' - -homepage = 'https://computation.llnl.gov/casc/sundials/main.html' -description = """SUNDIALS is a SUite of Nonlinear and DIfferential/ALgebraic equation Solvers. It consists of the -following six solvers: CVODE, solves initial value problems for ordinary differential equation (ODE) systems; CVODES, -solves ODE systems and includes sensitivity analysis capabilities (forward and adjoint); ARKODE, solves initial value -ODE problems with additive Runge-Kutta methods, include support for IMEX methods; IDA, solves initial value problems for -differential-algebraic equation (DAE) systems; IDAS, solves DAE systems and includes sensitivity analysis capabilities -(forward and adjoint); KINSOL, solves nonlinear algebraic systems. -""" - -examples = 'Examples can be found in $EBROOTSUNDIALS/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'optarch': True, 'usempi': True, 'openmp': True} - -source_urls = ['http://computation.llnl.gov/projects/sundials/download/'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -separate_build_dir = 'True' - -dependencies = [ - ('Hypre', '2.20.0'), -] - -configopts = [ - '-DBUILD_SHARED_LIBS=ON ' + - '-DMPI_ENABLE=ON ' + - '-DF77_INTERFACE_ENABLE=ON ' + - '-DOPENMP_ENABLE=ON ' + - '-DEXAMPLES_ENABLE_CXX=ON ' + - '-DSUNDIALS_BUILD_PACKAGE_FUSED_KERNELS=ON ' + - '-DLAPACK_ENABLE=ON -DLAPACK_LIBRARIES="$LIBLAPACK" ' + - '-DHYPRE_ENABLE=ON -DHYPRE_INCLUDE_DIR="$EBROOTHYPRE/include" -DHYPRE_LIBRARY_DIR="$EBROOTHYPRE/lib" ' -] - -postinstallcmds = [ - "cp -r examples %(installdir)s/examples", - "ln -s %(installdir)s/lib64 %(installdir)s/lib", -] - -sanity_check_paths = { - 'files': [], - 'dirs': ["examples", "include", "lib64"], -} - -modextravars = { - 'SUNDIALS_ROOT': '%(installdir)s', - 'SUNDIALS_LIB': '%(installdir)s/lib64', - 'SUNDIALS_INCLUDE': '%(installdir)s/include' -} - -moduleclass = 'math' diff --git a/Golden_Repo/t/Tcl/Tcl-8.6.10-GCCcore-10.3.0.eb b/Golden_Repo/t/Tcl/Tcl-8.6.10-GCCcore-10.3.0.eb deleted file mode 100644 index 89585e93188f7bf85f8342d3a145241481b55004..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/Tcl/Tcl-8.6.10-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Tcl' -version = '8.6.10' - -homepage = 'http://www.tcl.tk/' -description = """Tcl (Tool Command Language) is a very powerful but easy to learn dynamic programming language, -suitable for a very wide range of uses, including web and desktop applications, networking, -administration, testing and many more. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ["ftp://ftp.tcl.tk/pub/tcl/tcl8_6"] -sources = ['%(namelower)s%(version)s-src.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-threads EXTRA_INSTALL="install-private-headers"' - -runtest = 'test' - -start_dir = 'unix' - -moduleclass = 'lang' diff --git a/Golden_Repo/t/Tcl/Tcl-8.6.10-GCCcore-9.3.0.eb b/Golden_Repo/t/Tcl/Tcl-8.6.10-GCCcore-9.3.0.eb deleted file mode 100644 index c4030bcec11abac4bd4122810e98f0e7eed33b9e..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/Tcl/Tcl-8.6.10-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Tcl' -version = '8.6.10' - -homepage = 'http://www.tcl.tk/' -description = """Tcl (Tool Command Language) is a very powerful but easy to learn dynamic programming language, -suitable for a very wide range of uses, including web and desktop applications, networking, -administration, testing and many more. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ["ftp://ftp.tcl.tk/pub/tcl/tcl8_6"] -sources = ['%(namelower)s%(version)s-src.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('zlib', '1.2.11'), -] - -configopts = '--enable-threads EXTRA_INSTALL="install-private-headers"' - -runtest = 'test' - -start_dir = 'unix' - -moduleclass = 'lang' diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.1.0_fix-cuda-build.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.1.0_fix-cuda-build.patch deleted file mode 100644 index 5873bdc5ed029b3ba97d010d50ae2117e4b15ab7..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.1.0_fix-cuda-build.patch +++ /dev/null @@ -1,47 +0,0 @@ -fix for "undeclared inclusion(s) in rule" errors when building TensorFlow 1.14.0 with CUDA support, -if the installation directory for GCC is hosted in a path that is a symlink to another path; -the symlinked path is resolved in several places (both by 'gcc' itself and by the TF build process), -which makes hard comparisons between paths fail -author: Alexander Grund based on original patch by Kenneth Hoste (HPC-UGent) -diff --git a/third_party/gpus/cuda_configure.bzl b/third_party/gpus/cuda_configure.bzl -index ba4bd8ad75..dab492ecda 100644 ---- a/third_party/gpus/cuda_configure.bzl -+++ b/third_party/gpus/cuda_configure.bzl -@@ -303,11 +303,36 @@ def _get_cxx_inc_directories_impl(repository_ctx, cc, lang_is_cpp): - else: - inc_dirs = result.stderr[index1 + 1:index2].strip() - -- return [ -+ compiler_includes = [ - _normalize_include_path(repository_ctx, _cxx_inc_convert(p)) - for p in inc_dirs.split("\n") - ] - -+ # fix include path by also including paths where resolved symlink is replaced by original path -+ # Try to find real path to CC installation to "see through" compiler wrappers -+ # GCC has the path to g++ -+ index1 = result.stderr.find("COLLECT_GCC=") -+ if index1 != -1: -+ index1 = result.stderr.find("=", index1) -+ index2 = result.stderr.find("\n", index1) -+ cc_topdir = repository_ctx.path(result.stderr[index1 + 1 : index2]).dirname.dirname -+ else: -+ # Clang has the directory -+ index1 = result.stderr.find("InstalledDir: ") -+ if index1 != -1: -+ index1 = result.stderr.find(" ", index1) -+ index2 = result.stderr.find("\n", index1) -+ cc_topdir = repository_ctx.path(result.stderr[index1 + 1 : index2]).dirname -+ else: -+ # Fallback to the CC path -+ cc_topdir = repository_ctx.path(cc).dirname.dirname -+ cc_topdir_resolved = str(cc_topdir.realpath).strip() -+ cc_topdir = str(cc_topdir).strip() -+ if cc_topdir_resolved != cc_topdir: -+ original_compiler_includes = [p.replace(cc_topdir_resolved, cc_topdir) for p in compiler_includes] -+ compiler_includes = compiler_includes + original_compiler_includes -+ return compiler_includes -+ - def get_cxx_inc_directories(repository_ctx, cc, tf_sysroot): - """Compute the list of default C and C++ include directories.""" - diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.1.0_fix-system-nasm.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.1.0_fix-system-nasm.patch deleted file mode 100644 index 5a3341ecaa9d2e28fe6c7d1875b30e73e8bb730a..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.1.0_fix-system-nasm.patch +++ /dev/null @@ -1,28 +0,0 @@ -commit 5b3b9c7fe7501acd6bd69abe26fd3f9e0f1df4ef -Author: Alexander Grund <alexander.grund@tu-dresden.de> -Date: Wed Aug 12 15:48:17 2020 +0200 - - Use nasmlink genrule - - Avoids 
cyclic dependency as the name and src must not be the same - -diff --git a/third_party/nasm/BUILD.system b/third_party/nasm/BUILD.system -index 7f74da7595..52f608187f 100644 ---- a/third_party/nasm/BUILD.system -+++ b/third_party/nasm/BUILD.system -@@ -5,8 +5,14 @@ filegroup( - visibility = ["//visibility:public"], - ) - -+genrule( -+ name = "lnnasmlink", -+ outs = ["nasmlink"], -+ cmd = "ln -s $$(which nasm) $@", -+) -+ - sh_binary( - name = "nasm", -- srcs = ["nasm"], -+ srcs = ["nasmlink"], - visibility = ["@libjpeg_turbo//:__pkg__"], - ) diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-json-include-style.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-json-include-style.patch deleted file mode 100644 index 8e2429a01fc20b61266e2c8988a4689d2430671a..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-json-include-style.patch +++ /dev/null @@ -1,137 +0,0 @@ -From 20a00a0328fe1ad08ef51311b93d72d939c142e7 Mon Sep 17 00:00:00 2001 -From: Alexander Grund <alexander.grund@tu-dresden.de> -Date: Thu, 20 Aug 2020 16:39:40 +0200 -Subject: [PATCH] Include jsonCPP headers via #include "json/json.h" - -Don't use #include "include/json/json.h" which is unusual and therefore confusing -This allows to remove the header symlinking done for the system lib version - -Closes #42303 ---- - .../core/platform/cloud/gcs_file_system.cc | 2 +- - .../platform/cloud/google_auth_provider.cc | 2 +- - tensorflow/core/platform/cloud/oauth_client.h | 2 +- - .../profiler/convert/trace_events_to_json.cc | 2 +- - .../convert/trace_events_to_json_test.cc | 2 +- - .../core/profiler/internal/tfprof_timeline.h | 2 +- - third_party/systemlibs/jsoncpp.BUILD | 27 ------------------- - 9 files changed, 8 insertions(+), 35 deletions(-) - -diff --git a/tensorflow/core/platform/cloud/gcs_file_system.cc b/tensorflow/core/platform/cloud/gcs_file_system.cc -index f0d2138b379ea..59eb610fc5377 100644 ---- a/tensorflow/core/platform/cloud/gcs_file_system.cc -+++ b/tensorflow/core/platform/cloud/gcs_file_system.cc -@@ -30,7 +30,7 @@ limitations under the License. - #include <io.h> // for _mktemp - #endif - #include "absl/base/macros.h" --#include "include/json/json.h" -+#include "json/json.h" - #include "tensorflow/core/lib/gtl/map_util.h" - #include "tensorflow/core/platform/cloud/curl_http_request.h" - #include "tensorflow/core/platform/cloud/file_block_cache.h" -diff --git a/tensorflow/core/platform/cloud/google_auth_provider.cc b/tensorflow/core/platform/cloud/google_auth_provider.cc -index e8546ca022f22..57240fa2494b8 100644 ---- a/tensorflow/core/platform/cloud/google_auth_provider.cc -+++ b/tensorflow/core/platform/cloud/google_auth_provider.cc -@@ -24,7 +24,7 @@ limitations under the License. - #include <utility> - - #include "absl/strings/match.h" --#include "include/json/json.h" -+#include "json/json.h" - #include "tensorflow/core/platform/base64.h" - #include "tensorflow/core/platform/env.h" - #include "tensorflow/core/platform/errors.h" -diff --git a/tensorflow/core/platform/cloud/oauth_client.h b/tensorflow/core/platform/cloud/oauth_client.h -index ed8bf2572535e..97af3ecaf1785 100644 ---- a/tensorflow/core/platform/cloud/oauth_client.h -+++ b/tensorflow/core/platform/cloud/oauth_client.h -@@ -18,7 +18,7 @@ limitations under the License. 
- - #include <memory> - --#include "include/json/json.h" -+#include "json/json.h" - #include "tensorflow/core/platform/cloud/http_request.h" - #include "tensorflow/core/platform/env.h" - #include "tensorflow/core/platform/status.h" -diff --git a/tensorflow/core/profiler/convert/trace_events_to_json.cc b/tensorflow/core/profiler/convert/trace_events_to_json.cc -index ba3e4516c8cae..ad40292ceffb3 100644 ---- a/tensorflow/core/profiler/convert/trace_events_to_json.cc -+++ b/tensorflow/core/profiler/convert/trace_events_to_json.cc -@@ -21,7 +21,7 @@ limitations under the License. - - #include "absl/strings/str_cat.h" - #include "absl/strings/str_format.h" --#include "include/json/json.h" -+#include "json/json.h" - #include "tensorflow/core/platform/types.h" - #include "tensorflow/core/profiler/protobuf/trace_events.pb.h" - -diff --git a/tensorflow/core/profiler/convert/trace_events_to_json_test.cc b/tensorflow/core/profiler/convert/trace_events_to_json_test.cc -index dc985f2f76feb..bf08a19e0221e 100644 ---- a/tensorflow/core/profiler/convert/trace_events_to_json_test.cc -+++ b/tensorflow/core/profiler/convert/trace_events_to_json_test.cc -@@ -15,7 +15,7 @@ limitations under the License. - - #include "tensorflow/core/profiler/convert/trace_events_to_json.h" - --#include "include/json/json.h" -+#include "json/json.h" - #include "tensorflow/core/platform/protobuf.h" - #include "tensorflow/core/platform/test.h" - #include "tensorflow/core/profiler/protobuf/trace_events.pb.h" -diff --git a/tensorflow/core/profiler/internal/tfprof_timeline.h b/tensorflow/core/profiler/internal/tfprof_timeline.h -index 834e3c9be911e..fb9ff8012e06f 100644 ---- a/tensorflow/core/profiler/internal/tfprof_timeline.h -+++ b/tensorflow/core/profiler/internal/tfprof_timeline.h -@@ -17,7 +17,7 @@ limitations under the License. 
- #define TENSORFLOW_CORE_PROFILER_INTERNAL_TFPROF_TIMELINE_H_ - - #include "absl/strings/str_cat.h" --#include "include/json/json.h" -+#include "json/json.h" - #include "tensorflow/core/framework/graph.pb.h" - #include "tensorflow/core/framework/step_stats.pb.h" - #include "tensorflow/core/profiler/internal/tfprof_node_show.h" -diff --git a/third_party/systemlibs/jsoncpp.BUILD b/third_party/systemlibs/jsoncpp.BUILD -index 7d54f9289bfc7..b5951e3a3404f 100644 ---- a/third_party/systemlibs/jsoncpp.BUILD -+++ b/third_party/systemlibs/jsoncpp.BUILD -@@ -5,35 +5,8 @@ filegroup( - visibility = ["//visibility:public"], - ) - --HEADERS = [ -- "include/json/allocator.h", -- "include/json/assertions.h", -- "include/json/autolink.h", -- "include/json/config.h", -- "include/json/features.h", -- "include/json/forwards.h", -- "include/json/json.h", -- "include/json/reader.h", -- "include/json/value.h", -- "include/json/version.h", -- "include/json/writer.h", --] -- --genrule( -- name = "link_headers", -- outs = HEADERS, -- cmd = """ -- for i in $(OUTS); do -- i=$${i##*/} -- ln -sf $(INCLUDEDIR)/jsoncpp/json/$$i $(@D)/include/json/$$i -- done -- """, --) -- - cc_library( - name = "jsoncpp", -- hdrs = HEADERS, -- includes = ["."], - linkopts = ["-ljsoncpp"], - visibility = ["//visibility:public"], - ) diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-protoc-build.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-protoc-build.patch deleted file mode 100644 index d9c98c14dcb9d4a9b8d11e785cd3b46e13867b8c..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-protoc-build.patch +++ /dev/null @@ -1,37 +0,0 @@ -commit c8231eb55bdbff94c44e4b554321017fbd01bda9 -Author: Alexander Grund <alexander.grund@tu-dresden.de> -Date: Wed Jul 29 16:57:39 2020 +0200 - - Fix environment for protobuf compilation - - Patch using upstream https://github.com/grpc/grpc/pull/23664 - -https://github.com/tensorflow/tensorflow/pull/41889 -diff --git a/tensorflow/workspace.bzl b/tensorflow/workspace.bzl -index 24446d846c..ee63c4a811 100755 ---- a/tensorflow/workspace.bzl -+++ b/tensorflow/workspace.bzl -@@ -687,6 +687,7 @@ def tf_repositories(path_prefix = "", tf_repo_name = ""): - sha256 = "b956598d8cbe168b5ee717b5dafa56563eb5201a947856a6688bbeac9cac4e1f", - strip_prefix = "grpc-b54a5b338637f92bfcf4b0bc05e0f57a5fd8fadd", - system_build_file = clean_dep("//third_party/systemlibs:grpc.BUILD"), -+ patch_file = clean_dep("//third_party/grpc:generate_cc_env_fix.patch"), - system_link_files = { - "//third_party/systemlibs:BUILD": "bazel/BUILD", - "//third_party/systemlibs:grpc.BUILD": "src/compiler/BUILD", -diff --git a/third_party/grpc/generate_cc_env_fix.patch b/third_party/grpc/generate_cc_env_fix.patch -new file mode 100644 -index 0000000000..51832fe962 ---- /dev/null -+++ b/third_party/grpc/generate_cc_env_fix.patch -@@ -0,0 +1,10 @@ -+--- a/bazel/generate_cc.bzl -++++ b/bazel/generate_cc.bzl -+@@ -141,6 +141,7 @@ def generate_cc_impl(ctx): -+ outputs = out_files, -+ executable = ctx.executable._protoc, -+ arguments = arguments, -++ use_default_shell_env = True, -+ ) -+ -+ return struct(files = depset(out_files)) diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system-protobuf.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system-protobuf.patch deleted file mode 100644 index f7447d6300e2db7545eedc4243ea308fdec58ce1..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system-protobuf.patch +++ /dev/null @@ -1,119 +0,0 @@ -From 
ab6c3bf9d98d7627509865e91913c25a1f8cc693 Mon Sep 17 00:00:00 2001 -From: Steven Clarkson <sc@lambdal.com> -Date: Sat, 22 Aug 2020 16:57:25 -0700 -Subject: [PATCH] Add missing systemlib protobuf libraries - ---- - third_party/systemlibs/protobuf.BUILD | 83 +++++++++++++++++++++++++++ - 1 file changed, 83 insertions(+) - -diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD -index 118135d129005..ef3e0c93c6483 100644 ---- a/third_party/systemlibs/protobuf.BUILD -+++ b/third_party/systemlibs/protobuf.BUILD -@@ -15,8 +15,13 @@ filegroup( - HEADERS = [ - "google/protobuf/any.pb.h", - "google/protobuf/any.proto", -+ "google/protobuf/api.pb.h", -+ "google/protobuf/api.proto", - "google/protobuf/arena.h", - "google/protobuf/compiler/importer.h", -+ "google/protobuf/compiler/plugin.h", -+ "google/protobuf/compiler/plugin.pb.h", -+ "google/protobuf/compiler/plugin.proto", - "google/protobuf/descriptor.h", - "google/protobuf/descriptor.pb.h", - "google/protobuf/descriptor.proto", -@@ -32,9 +37,15 @@ HEADERS = [ - "google/protobuf/io/zero_copy_stream_impl_lite.h", - "google/protobuf/map.h", - "google/protobuf/repeated_field.h", -+ "google/protobuf/source_context.pb.h", -+ "google/protobuf/source_context.proto", -+ "google/protobuf/struct.pb.h", -+ "google/protobuf/struct.proto", - "google/protobuf/text_format.h", - "google/protobuf/timestamp.pb.h", - "google/protobuf/timestamp.proto", -+ "google/protobuf/type.pb.h", -+ "google/protobuf/type.proto", - "google/protobuf/util/json_util.h", - "google/protobuf/util/type_resolver_util.h", - "google/protobuf/wrappers.pb.h", -@@ -102,3 +113,75 @@ py_library( - srcs_version = "PY2AND3", - visibility = ["//visibility:public"], - ) -+ -+proto_library( -+ name = "any_proto", -+ srcs = ["google/protobuf/any.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "api_proto", -+ srcs = ["google/protobuf/api.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "compiler_plugin_proto", -+ srcs = ["google/protobuf/compiler/plugin.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "descriptor_proto", -+ srcs = ["google/protobuf/descriptor.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "duration_proto", -+ srcs = ["google/protobuf/duration.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "empty_proto", -+ srcs = ["google/protobuf/empty.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "field_mask_proto", -+ srcs = ["google/protobuf/field_mask.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "source_context_proto", -+ srcs = ["google/protobuf/source_context.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "struct_proto", -+ srcs = ["google/protobuf/struct.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "timestamp_proto", -+ srcs = ["google/protobuf/timestamp.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "type_proto", -+ srcs = ["google/protobuf/type.proto"], -+ visibility = ["//visibility:public"], -+) -+ -+proto_library( -+ name = "wrappers_proto", -+ srcs = ["google/protobuf/wrappers.proto"], -+ visibility = ["//visibility:public"], -+) diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system-protobuf2.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system-protobuf2.patch deleted 
file mode 100644 index 4eac729e65540d51cd155f8406fed7f1eda37fda..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system-protobuf2.patch +++ /dev/null @@ -1,103 +0,0 @@ -From 32c2048560940dcc3c475eefdc857bb6202d18e0 Mon Sep 17 00:00:00 2001 -From: Alexander Grund <alexander.grund@tu-dresden.de> -Date: Fri, 11 Sep 2020 17:14:34 +0200 -Subject: [PATCH] Do not symlink system protobuf headers but only required - .proto files - -Symlinking the system headers has proven to be problematic as newer -versions of protobuf add or remove headers which makes having a static -array of header files hard to impossible. Turns out the headers don't -need to be symlinked at all but only the .proto files used as inputs -need to be present. ---- - third_party/systemlibs/protobuf.BUILD | 35 +++------------------------ - 1 file changed, 3 insertions(+), 32 deletions(-) - -diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD -index ef3e0c93c6483..ccf2ab4dc7d8b 100644 ---- a/third_party/systemlibs/protobuf.BUILD -+++ b/third_party/systemlibs/protobuf.BUILD -@@ -12,49 +12,24 @@ filegroup( - visibility = ["//visibility:public"], - ) - --HEADERS = [ -- "google/protobuf/any.pb.h", -+PROTO_FILES = [ - "google/protobuf/any.proto", -- "google/protobuf/api.pb.h", - "google/protobuf/api.proto", -- "google/protobuf/arena.h", -- "google/protobuf/compiler/importer.h", -- "google/protobuf/compiler/plugin.h", -- "google/protobuf/compiler/plugin.pb.h", - "google/protobuf/compiler/plugin.proto", -- "google/protobuf/descriptor.h", -- "google/protobuf/descriptor.pb.h", - "google/protobuf/descriptor.proto", -- "google/protobuf/duration.pb.h", - "google/protobuf/duration.proto", -- "google/protobuf/dynamic_message.h", -- "google/protobuf/empty.pb.h", - "google/protobuf/empty.proto", -- "google/protobuf/field_mask.pb.h", - "google/protobuf/field_mask.proto", -- "google/protobuf/io/coded_stream.h", -- "google/protobuf/io/zero_copy_stream.h", -- "google/protobuf/io/zero_copy_stream_impl_lite.h", -- "google/protobuf/map.h", -- "google/protobuf/repeated_field.h", -- "google/protobuf/source_context.pb.h", - "google/protobuf/source_context.proto", -- "google/protobuf/struct.pb.h", - "google/protobuf/struct.proto", -- "google/protobuf/text_format.h", -- "google/protobuf/timestamp.pb.h", - "google/protobuf/timestamp.proto", -- "google/protobuf/type.pb.h", - "google/protobuf/type.proto", -- "google/protobuf/util/json_util.h", -- "google/protobuf/util/type_resolver_util.h", -- "google/protobuf/wrappers.pb.h", - "google/protobuf/wrappers.proto", - ] - - genrule( -- name = "link_headers", -- outs = HEADERS, -+ name = "link_proto_files", -+ outs = PROTO_FILES, - cmd = """ - for i in $(OUTS); do - f=$${i#$(@D)/} -@@ -66,14 +41,12 @@ genrule( - - cc_library( - name = "protobuf", -- hdrs = HEADERS, - linkopts = ["-lprotobuf"], - visibility = ["//visibility:public"], - ) - - cc_library( - name = "protobuf_headers", -- hdrs = HEADERS, - linkopts = ["-lprotobuf"], - visibility = ["//visibility:public"], - ) -@@ -94,7 +67,6 @@ genrule( - - cc_proto_library( - name = "cc_wkt_protos", -- hdrs = HEADERS, - internal_bootstrap_hack = 1, - protoc = ":protoc", - visibility = ["//visibility:public"], -@@ -109,7 +81,6 @@ proto_gen( - - py_library( - name = "protobuf_python", -- data = [":link_headers"], - srcs_version = "PY2AND3", - visibility = ["//visibility:public"], - ) diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system_absl_py.patch 
b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system_absl_py.patch deleted file mode 100644 index abe1835f0bac4890b9cc6066b916ed5ffc316166..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_fix-system_absl_py.patch +++ /dev/null @@ -1,55 +0,0 @@ -From fda56a4c619d093108cd2778e74073d65fb0e407 Mon Sep 17 00:00:00 2001 -From: Balint Cristian <cristian.balint@gmail.com> -Date: Wed, 24 Jun 2020 12:10:23 +0300 -Subject: [PATCH] [EXT-SYSLIB] Add absl_py logging submodule to build flow. - -From https://github.com/tensorflow/tensorflow/pull/40749 - ---- - tensorflow/opensource_only.files | 1 + - tensorflow/workspace.bzl | 1 + - third_party/systemlibs/absl_py.absl.logging.BUILD | 11 +++++++++++ - 3 files changed, 13 insertions(+) - create mode 100644 third_party/systemlibs/absl_py.absl.logging.BUILD - -diff --git a/tensorflow/opensource_only.files b/tensorflow/opensource_only.files -index 3d57e5f2089c5..cddf018bb21f7 100644 ---- a/tensorflow/opensource_only.files -+++ b/tensorflow/opensource_only.files -@@ -170,6 +170,7 @@ tensorflow/third_party/systemlibs/BUILD.tpl - tensorflow/third_party/systemlibs/absl_py.BUILD - tensorflow/third_party/systemlibs/absl_py.absl.flags.BUILD - tensorflow/third_party/systemlibs/absl_py.absl.testing.BUILD -+tensorflow/third_party/systemlibs/absl_py.absl.logging.BUILD - tensorflow/third_party/systemlibs/astor.BUILD - tensorflow/third_party/systemlibs/boringssl.BUILD - tensorflow/third_party/systemlibs/build_defs.bzl.tpl -diff --git a/tensorflow/workspace.bzl b/tensorflow/workspace.bzl -index f2d0c028c5fa7..d142910619c09 100755 ---- a/tensorflow/workspace.bzl -+++ b/tensorflow/workspace.bzl -@@ -535,6 +535,7 @@ def tf_repositories(path_prefix = "", tf_repo_name = ""): - "//third_party/systemlibs:absl_py.absl.BUILD": "absl/BUILD", - "//third_party/systemlibs:absl_py.absl.flags.BUILD": "absl/flags/BUILD", - "//third_party/systemlibs:absl_py.absl.testing.BUILD": "absl/testing/BUILD", -+ "//third_party/systemlibs:absl_py.absl.logging.BUILD": "absl/logging/BUILD", - }, - urls = [ - "https://storage.googleapis.com/mirror.tensorflow.org/github.com/abseil/abseil-py/archive/pypi-v0.9.0.tar.gz", -diff --git a/third_party/systemlibs/absl_py.absl.logging.BUILD b/third_party/systemlibs/absl_py.absl.logging.BUILD -new file mode 100644 -index 0000000000000..71cfc7a247c8a ---- /dev/null -+++ b/third_party/systemlibs/absl_py.absl.logging.BUILD -@@ -0,0 +1,11 @@ -+licenses(["notice"]) # Apache 2.0 -+ -+package(default_visibility = ["//visibility:public"]) -+ -+filegroup( -+ name = "LICENSE", -+) -+ -+py_library( -+ name = "logging", -+) diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_revert-tools-to-exectools-renaming.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_revert-tools-to-exectools-renaming.patch deleted file mode 100644 index 0791910e124ca51f9ea3588f1ab0fd68bbf578fa..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.0_revert-tools-to-exectools-renaming.patch +++ /dev/null @@ -1,56 +0,0 @@ -This reverts the following commit. That change caused action_env variables to not be -passed through breaking build using system dependencies. -Author: Alexander Grund (TU Dresden) - -From f827c023906e7d30f0e5f2992b111ab34153310a Mon Sep 17 00:00:00 2001 -From: "A. 
Unique TensorFlower" <gardener@tensorflow.org> -Date: Wed, 4 Mar 2020 22:27:03 -0800 -Subject: [PATCH] Use `exec_tools` rather than `tools` in tensorflow.bzl to - allow transitive dependencies to use python3 tools and deps in genrules - -PiperOrigin-RevId: 299023196 -Change-Id: I99981272330b060e9581c5c2a2f3992d524ae287 ---- - tensorflow/tensorflow.bzl | 12 ++++++------ - 1 file changed, 6 insertions(+), 6 deletions(-) - -diff --git a/tensorflow/tensorflow.bzl b/tensorflow/tensorflow.bzl -index b77b923cd4d4e..e4b3fa9ae90e5 100644 ---- a/tensorflow/tensorflow.bzl -+++ b/tensorflow/tensorflow.bzl -@@ -767,7 +767,7 @@ def tf_gen_op_wrapper_cc( - out_ops_file + "_internal.cc", - ], - srcs = srcs, -- exec_tools = [":" + tool] + tf_binary_additional_srcs(), -+ tools = [":" + tool] + tf_binary_additional_srcs(), - cmd = ("$(location :" + tool + ") $(location :" + out_ops_file + ".h) " + - "$(location :" + out_ops_file + ".cc) " + - str(include_internal_ops) + " " + api_def_args_str), -@@ -969,7 +969,7 @@ def tf_gen_op_wrapper_py( - name = name + "_pygenrule", - outs = [out], - srcs = api_def_srcs + [hidden_file], -- exec_tools = [tool_name] + tf_binary_additional_srcs(), -+ tools = [tool_name] + tf_binary_additional_srcs(), - cmd = ("$(location " + tool_name + ") " + api_def_args_str + - " @$(location " + hidden_file + ") > $@"), - ) -@@ -978,7 +978,7 @@ def tf_gen_op_wrapper_py( - name = name + "_pygenrule", - outs = [out], - srcs = api_def_srcs, -- exec_tools = [tool_name] + tf_binary_additional_srcs(), -+ tools = [tool_name] + tf_binary_additional_srcs(), - cmd = ("$(location " + tool_name + ") " + api_def_args_str + " " + - op_list_arg + " " + - ("1" if op_list_is_whitelist else "0") + " > $@"), -@@ -2430,7 +2430,7 @@ def tf_generate_proto_text_sources(name, srcs_relative_dir, srcs, protodeps = [] - cmd = - "$(location //tensorflow/tools/proto_text:gen_proto_text_functions) " + - "$(@D) " + srcs_relative_dir + " $(SRCS)", -- exec_tools = [ -+ tools = [ - clean_dep("//tensorflow/tools/proto_text:gen_proto_text_functions"), - ], - ) diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.1-fix-numpy-gcc10.patch b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.1-fix-numpy-gcc10.patch deleted file mode 100644 index 0170f02644ab1a74a17cc7be446c83d49d893bbc..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.1-fix-numpy-gcc10.patch +++ /dev/null @@ -1,31 +0,0 @@ -diff --git a/tensorflow/python/lib/core/bfloat16.cc b/tensorflow/python/lib/core/bfloat16.cc -index feb01f11a1af2..bb6b720febe59 100644 ---- a/tensorflow/python/lib/core/bfloat16.cc -+++ b/tensorflow/python/lib/core/bfloat16.cc -@@ -517,7 +517,7 @@ bool RegisterBfloat16Cast(int numpy_type, bool cast_is_safe) { - } - - template <typename InType, typename OutType, typename Functor> --void BinaryUFunc(char** args, npy_intp* dimensions, npy_intp* steps, -+void BinaryUFunc(char** args, const npy_intp* dimensions, const npy_intp* steps, - void* data) { - const char* i0 = args[0]; - const char* i1 = args[1]; -@@ -532,11 +532,17 @@ void BinaryUFunc(char** args, npy_intp* dimensions, npy_intp* steps, - } - } - -+// Numpy changed const-ness of PyUFuncGenericFunction, provide overload. 
- template <typename Functor> - void CompareUFunc(char** args, npy_intp* dimensions, npy_intp* steps, - void* data) { - BinaryUFunc<bfloat16, npy_bool, Functor>(args, dimensions, steps, data); - } -+template <typename Functor> -+void CompareUFunc(char** args, const npy_intp* dimensions, -+ const npy_intp* steps, void* data) { -+ BinaryUFunc<bfloat16, npy_bool, Functor>(args, dimensions, steps, data); -+} - - struct Bfloat16EqFunctor { - npy_bool operator()(bfloat16 a, bfloat16 b) { return a == b; } \ No newline at end of file diff --git a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/t/TensorFlow/TensorFlow-2.3.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 3a91cdbed7662d561c0cfe536ce5dabdd7586078..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/TensorFlow-2.3.1-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,180 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'TensorFlow' -version = '2.3.1' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://www.tensorflow.org/' -description = "An open-source software library for Machine Intelligence" - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -site_contacts = 'a.strube@fz-juelich.de' - -local_cudaver = '11.0' - -builddependencies = [ - ('binutils', '2.34'), - ('Bazel', '3.6.0'), - # protobuf > 3.9.2 required - ('protobuf', '3.13.0'), - # git 2.x required, see also https://github.com/tensorflow/tensorflow/issues/29053 - ('git', '2.28.0'), - ('pybind11', '2.5.0', versionsuffix), -] -dependencies = [ - ('CUDA', local_cudaver, '', SYSTEM), - ('cuDNN', '8.0.2.39', '-CUDA-%s' % local_cudaver, SYSTEM), - ('NCCL', '2.8.3-1', '-CUDA-%s' % local_cudaver), - ('Python', '3.8.5'), - ('h5py', '2.10.0', '-serial%s' % versionsuffix), - ('cURL', '7.71.1'), - ('double-conversion', '3.1.5'), - ('flatbuffers', '1.12.0'), - ('giflib', '5.2.1'), - ('hwloc', '2.2.0'), - ('ICU', '67.1'), - ('JsonCpp', '1.9.4'), - ('libjpeg-turbo', '2.0.5'), - ('LMDB', '0.9.24'), - ('NASM', '2.15.03'), - ('nsync', '1.24.0'), - ('SQLite', '3.32.3'), - ('PCRE', '8.44'), - ('protobuf-python', '3.13.0', versionsuffix), - ('libpng', '1.6.37'), - ('snappy', '1.1.8'), - ('SWIG', '4.0.2', versionsuffix), - ('zlib', '1.2.11'), -] - -exts_default_options = { - 'source_urls': [PYPI_SOURCE], - 'sanity_pip_check': True, -} -use_pip = True - -# Dependencies created and updated using findPythonDeps.sh: -# https://gist.github.com/Flamefire/49426e502cd8983757bd01a08a10ae0d -exts_list = [ - ('pandas', '1.1.3' - ), - ('numpy', '1.18.5', { - 'source_urls': ['https://github.com/numpy/numpy/releases/download/v1.18.5/'], - 'patches': ['numpy-1.14.2-mkl.patch', - 'numpy-1.14.2-xhost.patch', - ], - }), - ('Markdown', '3.2.2', { - 'checksums': ['1fafe3f1ecabfb514a5285fca634a53c1b32a81cb0feb154264d55bf2ff22c17'], - }), - ('pyasn1-modules', '0.2.8', { - 'checksums': ['905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e'], - }), - ('rsa', '4.6', { - 'checksums': ['109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa'], - }), - ('cachetools', '4.1.1', { - 'checksums': ['bbaa39c3dede00175df2dc2b03d0cf18dd2d32a7de7beb68072d13043c9edb20'], - }), - ('google-auth', '1.22.1', { - 'modulename': 'google.auth', - }), - ('oauthlib', '3.1.0', { - 'checksums': ['bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889'], - }), - ('requests-oauthlib', '1.3.0', { - 'checksums': 
['b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a'], - }), - ('google-auth-oauthlib', '0.4.1', { - 'checksums': ['88d2cd115e3391eb85e1243ac6902e76e77c5fe438b7276b297fbe68015458dd'], - }), - ('absl-py', '0.10.0', { - 'modulename': 'absl', - 'checksums': ['b20f504a7871a580be5268a18fbad48af4203df5d33dbc9272426cb806245a45'], - }), - ('astunparse', '1.6.3', { - 'checksums': ['5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872'], - }), - ('grpcio', '1.33.1', { - 'modulename': 'grpc', - }), - ('tensorboard-plugin-wit', '1.7.0', { - 'source_tmpl': 'tensorboard_plugin_wit-%(version)s-py3-none-any.whl', - 'unpack_sources': False, - 'checksums': ['ee775f04821185c90d9a0e9c56970ee43d7c41403beb6629385b39517129685b'], - }), - ('tensorboard', '2.3.0', { - 'source_tmpl': 'tensorboard-%(version)s-py3-none-any.whl', - 'unpack_sources': False, - 'checksums': ['d34609ed83ff01dd5b49ef81031cfc9c166bba0dabd60197024f14df5e8eae5e'], - }), - ('google-pasta', '0.2.0', { - 'modulename': 'pasta', - 'checksums': ['c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e'], - }), - ('termcolor', '1.1.0', { - 'checksums': ['1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b'], - }), - ('tensorflow-estimator', '2.3.0', { - 'source_tmpl': 'tensorflow_estimator-%(version)s-py2.py3-none-any.whl', - 'unpack_sources': False, - 'checksums': ['b75e034300ccb169403cf2695adf3368da68863aeb0c14c3760064c713d5c486'], - }), - ('astor', '0.8.1', { - }), - ('gast', '0.3.3', { - 'checksums': ['b881ef288a49aa81440d2c5eb8aeefd4c2bb8993d5f50edae7413a85bfdb3b57'], - }), - ('opt-einsum', '3.3.0', { - 'source_tmpl': 'opt_einsum-%(version)s.tar.gz', - 'checksums': ['59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549'], - }), - ('wrapt', '1.12.1', { - 'checksums': ['b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7'], - }), - ('Keras-Preprocessing', '1.1.2', { - 'source_tmpl': 'Keras_Preprocessing-%(version)s.tar.gz', - 'checksums': ['add82567c50c8bc648c14195bf544a5ce7c1f76761536956c3d2978970179ef3'], - }), - (name, version, { - 'patches': [ - 'TensorFlow-2.1.0_fix-cuda-build.patch', - 'TensorFlow-2.1.0_fix-system-nasm.patch', - 'TensorFlow-2.3.0_fix-json-include-style.patch', - 'TensorFlow-2.3.0_fix-system_absl_py.patch', - 'TensorFlow-2.3.0_fix-system-protobuf.patch', - 'TensorFlow-2.3.0_fix-system-protobuf2.patch', - 'TensorFlow-2.3.0_fix-protoc-build.patch', - 'TensorFlow-2.3.0_revert-tools-to-exectools-renaming.patch', - 'TensorFlow-2.3.1-fix-numpy-gcc10.patch', - ], - 'source_tmpl': 'v%(version)s.tar.gz', - 'source_urls': ['https://github.com/tensorflow/tensorflow/archive/'], - # 'test_script': 'TensorFlow-2.x_mnist-test.py', # Fails on juwels login nodes - 'checksums': [ - 'ee534dd31a811f7a759453567257d1e643f216d8d55a25c32d2fbfff8153a1ac', # v2.3.1.tar.gz - '78c20aeaa7784b8ceb46238a81e8c2461137d28e0b576deeba8357d23fbe1f5a', # TensorFlow-2.1.0_fix-cuda-build.patch - # TensorFlow-2.1.0_fix-system-nasm.patch - '6671e40d60edaf1e57b1861aa3b2178d48f9b7dfb5b5c0d44db541116f848f2a', - # TensorFlow-2.3.0_fix-json-include-style.patch - 'e04f5d9aa4c1222393b2cad4b84cd63128ef56eb5cc732d2afbc5d98dfa596a4', - # TensorFlow-2.3.0_fix-system_absl_py.patch - '04e53b637ca64c4804a4a8d636d75e3c09257034b143b70a33bfee09e6ab4b10', - # TensorFlow-2.3.0_fix-system-protobuf.patch - '4407c3f126eb9d3c1658ef8d6ab3a26e7ed2a40ac869dd194d8a1282f6432c1d', - # TensorFlow-2.3.0_fix-system-protobuf2.patch - '74718e007acb7f5f016848218421895a33e9e894620154482deb75408225577d', - # 
TensorFlow-2.3.0_fix-protoc-build.patch - '78a8390ea5e06dfcfb25f6434721abbe678e37c4ad7f052aad766c4df399bd53', - # TensorFlow-2.3.0_revert-tools-to-exectools-renaming.patch - '1a72f072a764bf66733445ce027c0bad7de7c88092363c0e2a7b91a422608d56', - # TensorFlow-2.3.1-fix-numpy-gcc10.patch - '48250cb9f5287438021ec50f32618b200a90ca81be069f1d888fdef655ed6eed', - ], - 'cuda_compute_capabilities': ['7.0', '7.5', '8.0'], - }), -] - -moduleclass = 'lib' diff --git a/Golden_Repo/t/TensorFlow/numpy-1.14.2-mkl.patch b/Golden_Repo/t/TensorFlow/numpy-1.14.2-mkl.patch deleted file mode 100644 index 4984299a06b64023175ef8fb4ccd7abcb5303ba6..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/numpy-1.14.2-mkl.patch +++ /dev/null @@ -1,53 +0,0 @@ -diff -ruN numpy-1.14.2.orig/numpy/distutils/fcompiler/__init__.py numpy-1.14.2/numpy/distutils/fcompiler/__init__.py ---- numpy-1.14.2.orig/numpy/distutils/fcompiler/__init__.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/fcompiler/__init__.py 2018-04-19 15:33:19.288258230 +0200 -@@ -625,7 +625,10 @@ - return options - - def library_option(self, lib): -- return "-l" + lib -+ if lib[0]=='-': -+ return lib -+ else: -+ return "-l" + lib - def library_dir_option(self, dir): - return "-L" + dir - -diff -ruN numpy-1.14.2.orig/numpy/distutils/system_info.py numpy-1.14.2/numpy/distutils/system_info.py ---- numpy-1.14.2.orig/numpy/distutils/system_info.py 2018-03-12 16:49:53.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/system_info.py 2018-04-19 15:35:50.127908579 +0200 -@@ -727,7 +727,7 @@ - if is_string(default): - return [default] - return default -- return [b for b in [a.strip() for a in libs.split(',')] if b] -+ return [b for b in [a.strip().replace(':',',') for a in libs.split(',')] if b] - - def get_libraries(self, key='libraries'): - if hasattr(self, '_lib_names'): -@@ -812,6 +812,9 @@ - # make sure we preserve the order of libs, as it can be important - found_dirs, found_libs = [], [] - for lib in libs: -+ if lib[0] == '-': -+ found_libs.append(lib) -+ continue - for lib_dir in lib_dirs: - found_lib = self._find_lib(lib_dir, lib, exts) - if found_lib: -diff -ruN numpy-1.14.2.orig/numpy/distutils/unixccompiler.py numpy-1.14.2/numpy/distutils/unixccompiler.py ---- numpy-1.14.2.orig/numpy/distutils/unixccompiler.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/unixccompiler.py 2018-04-19 15:37:42.892095000 +0200 -@@ -136,3 +136,12 @@ - - replace_method(UnixCCompiler, 'create_static_lib', - UnixCCompiler_create_static_lib) -+ -+def UnixCCompiler_library_option(self, lib): -+ if lib[0]=='-': -+ return lib -+ else: -+ return "-l" + lib -+ -+replace_method(UnixCCompiler, 'library_option', -+ UnixCCompiler_library_option) diff --git a/Golden_Repo/t/TensorFlow/numpy-1.14.2-xhost.patch b/Golden_Repo/t/TensorFlow/numpy-1.14.2-xhost.patch deleted file mode 100644 index 5c4519f2fbabf5f8741c799078335658ec641d82..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TensorFlow/numpy-1.14.2-xhost.patch +++ /dev/null @@ -1,24 +0,0 @@ -diff -ruN numpy-1.14.2.orig/numpy/distutils/fcompiler/intel.py numpy-1.14.2/numpy/distutils/fcompiler/intel.py ---- numpy-1.14.2.orig/numpy/distutils/fcompiler/intel.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/fcompiler/intel.py 2018-04-19 15:30:13.945734000 +0200 -@@ -61,7 +61,7 @@ - return ['-fp-model strict -O1 -{}'.format(mpopt)] - - def get_flags_arch(self): -- return [] -+ return ['-xHost'] - - def get_flags_linker_so(self): - opt = 
FCompiler.get_flags_linker_so(self) -diff -ruN numpy-1.14.2.orig/numpy/distutils/intelccompiler.py numpy-1.14.2/numpy/distutils/intelccompiler.py ---- numpy-1.14.2.orig/numpy/distutils/intelccompiler.py 2018-03-01 01:03:27.000000000 +0100 -+++ numpy-1.14.2/numpy/distutils/intelccompiler.py 2018-04-19 15:30:43.659844000 +0200 -@@ -61,7 +61,7 @@ - v = self.get_version() - mpopt = 'openmp' if v and v < '15' else 'qopenmp' - self.cc_exe = ('icc -m64 -fPIC -fp-model strict -O3 ' -- '-fomit-frame-pointer -{}').format(mpopt) -+ '-fomit-frame-pointer -xHost -{}').format(mpopt) - compiler = self.cc_exe - - if platform.system() == 'Darwin': diff --git a/Golden_Repo/t/Tk/Tk-8.6.10-GCCcore-10.3.0.eb b/Golden_Repo/t/Tk/Tk-8.6.10-GCCcore-10.3.0.eb deleted file mode 100644 index 22e4abd3ff9ef01d878465a91ad0ae671f03ff6f..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/Tk/Tk-8.6.10-GCCcore-10.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Tk' -version = '8.6.10' - -homepage = 'http://www.tcl.tk/' -description = """Tk is an open source, cross-platform widget toolchain that provides a library of basic elements -for building a graphical user interface (GUI) in many different programming languages. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ["ftp://ftp.tcl.tk/pub/tcl/tcl8_6"] -sources = ['%(namelower)s%(version)s-src.tar.gz'] - -patches = ['Tk-8.6.8_different-prefix-with-tcl.patch'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('Tcl', version), - ('X11', '20200222'), -] - -configopts = '--disable-xss --enable-threads --with-tcl=$EBROOTTCL/lib CFLAGS="-I$EBROOTTCL/include"' - -start_dir = 'unix' - -moduleclass = 'vis' diff --git a/Golden_Repo/t/Tk/Tk-8.6.10-GCCcore-9.3.0.eb b/Golden_Repo/t/Tk/Tk-8.6.10-GCCcore-9.3.0.eb deleted file mode 100644 index de735e248cb0524727b304993c7a8c0a46aaabca..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/Tk/Tk-8.6.10-GCCcore-9.3.0.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Tk' -version = '8.6.10' - -homepage = 'http://www.tcl.tk/' -description = """Tk is an open source, cross-platform widget toolchain that provides a library of basic elements -for building a graphical user interface (GUI) in many different programming languages. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ["ftp://ftp.tcl.tk/pub/tcl/tcl8_6"] -sources = ['%(namelower)s%(version)s-src.tar.gz'] - -patches = ['Tk-8.6.8_different-prefix-with-tcl.patch'] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('Tcl', version), - ('X11', '20200222'), -] - -configopts = '--disable-xss --enable-threads --with-tcl=$EBROOTTCL/lib CFLAGS="-I$EBROOTTCL/include"' - -start_dir = 'unix' - -moduleclass = 'vis' diff --git a/Golden_Repo/t/Tk/Tk-8.6.8_different-prefix-with-tcl.patch b/Golden_Repo/t/Tk/Tk-8.6.8_different-prefix-with-tcl.patch deleted file mode 100644 index b95af0c8e96135c94aa5384e2bf5bb3aa409248c..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/Tk/Tk-8.6.8_different-prefix-with-tcl.patch +++ /dev/null @@ -1,23 +0,0 @@ -diff -ruN tk8.6.8.orig/generic/tkWindow.c tk8.6.8/generic/tkWindow.c ---- tk8.6.8.orig/generic/tkWindow.c 2017-12-06 16:25:08.000000000 +0100 -+++ tk8.6.8/generic/tkWindow.c 2018-03-26 15:07:54.077034321 +0200 -@@ -954,6 +954,7 @@ - - Tcl_SetVar2(interp, "tk_patchLevel", NULL, TK_PATCH_LEVEL, TCL_GLOBAL_ONLY); - Tcl_SetVar2(interp, "tk_version", NULL, TK_VERSION, TCL_GLOBAL_ONLY); -+ Tcl_SetVar2(interp, "tk_library", NULL, TK_LIBRARY, TCL_GLOBAL_ONLY); - - tsdPtr->numMainWindows++; - return tkwin; -diff -ruN tk8.6.8.orig/unix/Makefile.in tk8.6.8/unix/Makefile.in ---- tk8.6.8.orig/unix/Makefile.in 2017-12-21 20:48:19.000000000 +0100 -+++ tk8.6.8/unix/Makefile.in 2018-03-26 15:08:31.714837462 +0200 -@@ -1026,7 +1026,7 @@ - $(CC) -c $(CC_SWITCHES) $(GENERIC_DIR)/tkVisual.c - - tkWindow.o: $(GENERIC_DIR)/tkWindow.c -- $(CC) -c $(CC_SWITCHES) $(GENERIC_DIR)/tkWindow.c -+ $(CC) -c $(CC_SWITCHES) -DTK_LIBRARY=\"${TK_LIBRARY}\" $(GENERIC_DIR)/tkWindow.c - - tkButton.o: $(GENERIC_DIR)/tkButton.c - $(CC) -c $(CC_SWITCHES) $(GENERIC_DIR)/tkButton.c diff --git a/Golden_Repo/t/TotalView/TotalView-2020.1.13-GCCcore-9.3.0.eb b/Golden_Repo/t/TotalView/TotalView-2020.1.13-GCCcore-9.3.0.eb deleted file mode 100644 index aff7ec04bf4f5f3b55f8a4972d70a848533790bd..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TotalView/TotalView-2020.1.13-GCCcore-9.3.0.eb +++ /dev/null @@ -1,62 +0,0 @@ -# This is an easyconfig file for EasyBuild, see -# https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2014 Juelich Supercomputing Centre, Germany -# Authors:: Alexandre Strube <surak@surak.eti.br> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -name = "TotalView" -version = "2020.1.13" - -homepage = 'http://www.roguewave.com/products-services/totalview' - -description = """TotalView breaks down barriers to understanding what's going - on with your high-performance computing (HPC) and supercomputing applications. - Purpose-built for multicore and parallel computing, TotalView provides a set - of tools providing unprecedented control over processes and thread execution, - along with deep visibility into program states and data. - - By allowing the simultaneous debugging of many processes and threads in a - single window, you get complete control over program execution: running, - stepping, and halting line-by-line through code within a single thread or - within arbitrary groups of processes or threads. 
You can also work backwards - from failure through reverse debugging, isolating the root cause faster by - eliminating the need to repeatedly restart the application, reproduce and - troubleshoot difficult problems that can occur in concurrent programs that - take advantage of threads, OpenMP, MPI, GPUs, or coprocessors. - - With customizable displays of the state of your running program, memory leaks, - deadlocks, and race conditions are things of the past. Whether you're a - scientific and technical computing veteran, or new to the development - challenges of multicore or parallel applications, TotalView gives you the - insight to find and correct errors quickly, validate prototypes early, verify - calculations accurately, and above all, certify code correctly. - - TotalView works with C, C++, and Fortran applications written for Linux - (including the Cray and Blue Gene platforms), UNIX, Mac OS X, and Xeon Phi - coprocessor, and supports OpenMP, MPI, and OpenACC / CUDA. - """ - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - - -dependencies = [ - ('X11', '20200222'), -] - -sources = [ - '%(namelower)s_%(version)s_linux_x86-64.tar', - '%(namelower)s.%(version)s-doc.tar', -] - -sanity_check_paths = { - 'files': ["toolworks/%(namelower)s.%(version)s/bin/totalview"], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/t/TotalView/TotalView-2021.1.16-GCCcore-9.3.0.eb b/Golden_Repo/t/TotalView/TotalView-2021.1.16-GCCcore-9.3.0.eb deleted file mode 100644 index 7fa2d5b6f49d7d40434d59cbe9fb3145e8d109d9..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/TotalView/TotalView-2021.1.16-GCCcore-9.3.0.eb +++ /dev/null @@ -1,62 +0,0 @@ -# This is an easyconfig file for EasyBuild, see -# https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2014 Juelich Supercomputing Centre, Germany -# Authors:: Alexandre Strube <surak@surak.eti.br> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -name = "TotalView" -version = "2021.1.16" - -homepage = 'http://www.roguewave.com/products-services/totalview' - -description = """TotalView breaks down barriers to understanding what's going - on with your high-performance computing (HPC) and supercomputing applications. - Purpose-built for multicore and parallel computing, TotalView provides a set - of tools providing unprecedented control over processes and thread execution, - along with deep visibility into program states and data. - - By allowing the simultaneous debugging of many processes and threads in a - single window, you get complete control over program execution: running, - stepping, and halting line-by-line through code within a single thread or - within arbitrary groups of processes or threads. You can also work backwards - from failure through reverse debugging, isolating the root cause faster by - eliminating the need to repeatedly restart the application, reproduce and - troubleshoot difficult problems that can occur in concurrent programs that - take advantage of threads, OpenMP, MPI, GPUs, or coprocessors. - - With customizable displays of the state of your running program, memory leaks, - deadlocks, and race conditions are things of the past. 
Whether you're a - scientific and technical computing veteran, or new to the development - challenges of multicore or parallel applications, TotalView gives you the - insight to find and correct errors quickly, validate prototypes early, verify - calculations accurately, and above all, certify code correctly. - - TotalView works with C, C++, and Fortran applications written for Linux - (including the Cray and Blue Gene platforms), UNIX, Mac OS X, and Xeon Phi - coprocessor, and supports OpenMP, MPI, and OpenACC / CUDA. - """ - -site_contacts = 'm.knobloch@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - - -dependencies = [ - ('X11', '20200222'), -] - -sources = [ - '%(namelower)s_%(version)s_linux_x86-64.tar', - '%(namelower)s.%(version)s-doc.tar', -] - -sanity_check_paths = { - 'files': ["toolworks/%(namelower)s.%(version)s/bin/totalview"], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/t/Turbomole/Turbomole-7.4.1-intel-2020hm.eb b/Golden_Repo/t/Turbomole/Turbomole-7.4.1-intel-2020hm.eb deleted file mode 100644 index 1cbcb059889f427fecf2e65108f67fa89817dda2..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/Turbomole/Turbomole-7.4.1-intel-2020hm.eb +++ /dev/null @@ -1,166 +0,0 @@ -easyblock = 'PackedBinary' - -name = 'Turbomole' -version = '7.4.1' -versionsuffix = 'hm' - -homepage = 'http://www.turbomole.com' -description = """ -TURBOMOLE has been designed for robust and fast quantum chemical applications. -It provides all standard and state of the art methods for ground state calculations (Hartree-Fock, DFT, MP2, CCSD(T)), -excited state calculations at different levels (full RPA, TDDFT, CIS(D), CC2, ADC(2), ...), -geometry optimizations, transition state searches, spectra (IR, UV/Vis, Raman, CD). - -Turbomole has low memory and disk space requirements using direct and semi-direct algorithm -combined with efficient integral evaluation, stable and accurate grids for numerical integration -of DFT functionals and provide full support of all finite point groups. -""" - -site_contacts = 'Th. Mueller (th.mueller@fz-juelich.de)' - -usage = """ - -Input generation through the interactive command line utility define or via TmoleX a GUI for input generation -and output analysis available from the turbomole website. - -Structure optimizations are carried out via the jobex script. - -Individual calculations can be carried out by direct invocation of the individual modules. -The standard arguments - - -n <number_of_tasks> using as many threads (SMP) or processes (MPI), respectively - -seriel forcing seriel operation - -mpi forcing the mpi version - -fork forcing the fork version (single node operation) - -omp forcing the openmp version (single node operation) - -ompstacksize overriding the default value OMP_STACKSIZE (256M) -e.g. - - running the MPI parallel version of ridft on 24 cores of a single node - - export TURBODIR=$EBROOTTURBOMOLE - salloc --nodes=1 --partition=batch --time=01:00:00 - ridft -n 24 -mpi - - The standard mode of operation is affected by exporting the environment variables - TM_PAR_OMP, TM_PAR_FORK, TM_PAR_MPI (increasing priority) - - The command line arguments (-seriel,-mpi,-fork,-omp) override the default operation. - - -The installation on Jureca is to some extent non-standard as the defaults are adjusted -to provide optimum performance on a single node. -Some modules provide several modes of parallelization. 
Note, that not all features -of every module are parallized (varying also between different parallelization modes) -so that depending on your application the parallel scalability may be limited. - - -modules description default TM_PAR_OMP TM_PAR_FORK TM_PAR_MPI ------------------------------------------------------------------------------------- -aoforce 2nd derivatives OMP OMP FORK MPI -ccsdf12 ccsd F12 OMP OMP -- MPI -dscf DFT/HF OMP OMP FORK MPI -egrad TDDFT gradients OMP OMP FORK --- -escf TDDFT OMP OMP FORK MPI -grad DFT/HF gradients OMP OMP FORK MPI -evib vibronic coupling OMP OMP -- MPI -mpgrad MP2 gradient MPI -- -- MPI -mpshift MP2 NMR shifts OMP OMP -- -- -odft orbital dep. DFT OMP OMP -- -- -pnoccsd PNO-CCSD OMP OMP -- MPI -proper properties OMP OMP -- -- -rdgrad DFT/HF gradients - RI approx. MPI* OMP -- MPI* -ricc2 CC RI approx. OMP OMP -- (MPI) -ridft DFT/HF RI approx MPI* OMP -- MPI* -riper DFT RI approx. - 0D-3D PBC OMP OMP -- -- -rirpa RPA ri approx. OMP OMP -- -- -tb tight binding OMP OMP -- -- ---------------------------------------------------------------------------------------- - -MPI* = MPI+shared memory implementation (single and multiple nodes) -(MPI) = unstable MPI operation (potential dead locks) -""" - -examples = """ -Example batch script: - -#!/bin/bash -#SBATCH --time=01:00:00 -#SBATCH --output=output.%j -#SBATCH --error=error.%j -#SBATCH --partition=devel -#SBATCH --job-name=tmtest -#SBATCH --nodes=1 -#SBATCH --account=<MYACCOUNT> - -module load Intel -module load IntelMPI -module load Turbomole/7.4.1hm - -# switch to the input directory -# running a geometry optimization with ridft/rdgrad on 24 cores/1 node in MPI mode -jobex -l "$TURBODIR/bin/JURECA_DC" -ri -np 24 - -# running a geometry optimization with dscf/grad on 24 cores/1 node in OMP mode -jobex -l "$TURBODIR/bin/JURECA_DC" -np 24 - -# running a geometry optimization with dscf/grad on 24 cores/1 node in MPI mode -export TM_PAR_MPI=ON -jobex -l "$TURBODIR/bin/JURECA_DC" -np 24 - -# running an individual DFT calculation using dscf in OMP mode (default) -unset TM_PAR_MPI -dscf -n 24 > dscf.out -# running an individual DFT calculation using dscf in FORK mode -export TM_PAR_FORK=ON -dscf -n 24 > dscf.out -# running an individual DFT calculation using dscf in MPI mode -unset TM_PAR_FORK -export TM_PAR_MPI=ON -dscf -n 24 > dscf.out - -Documentation: - - Please refer to $TURBODIR/DOK/turbomole_7.4.1_manual.pdf for a detailed description. - - Examples with typical turn-around times of a few minutes on a single - node can be found in $TURBODIR/Examples. - - Large SMP calculations might be sensitive to the default size of - the OMP stack - increase/change the default size of 256M - e.g. via -ompstacksize 512M - - - Note, in contrast to the statements in the manual, - the environment variable PARA_ARCH is *not* operational in this setup! - Please refer also to the file $TURBODIR/DOK/Known_Problems.txt. - Direct questions to th.mueller@fz-juelich.de. 
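As an illustration of the TM_PAR_OMP / TM_PAR_FORK / TM_PAR_MPI switches documented above (this sketch is not part of the Turbomole easyconfig or distribution; the helper name and the choice of dscf are assumptions made only for the example):

import os

def select_turbomole_mode(mode):
    # Clear any previously selected mode, then enable exactly one of the
    # TM_PAR_* environment switches described in the usage text above.
    for var in ("TM_PAR_OMP", "TM_PAR_FORK", "TM_PAR_MPI"):
        os.environ.pop(var, None)
    os.environ["TM_PAR_" + mode.upper()] = "ON"

# Example: prepare an MPI run of dscf on 24 tasks. The command is only printed
# here, since actually executing it requires a loaded Turbomole module.
select_turbomole_mode("mpi")
print("would run: dscf -n 24 > dscf.out")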
-""" - -toolchain = {'name': 'intel', 'version': '2020'} - -sources = ['%(namelower)s.%(version)s_%(versionsuffix)s_JURECA_DC.tar.gz'] - -group = "turbomol" - -modextravars = { - 'TURBODIR': '%(installdir)s', - 'TURBOLD': '%(installdir)s/bin/JURECA_DC', - 'OMP_WAIT_POLICY': 'passive', - 'OMP_STACKSIZE': '128M' -} - -modextrapaths = { - 'PATH': ["scripts", "bin/JURECA_DC"] -} - -sanity_check_paths = { - 'dirs': ['bin', 'bin/JURECA_DC', 'DOK', 'basen', 'basold', 'cabasen', 'cbasen', - 'xbasen', 'jbasen', 'jkbasen', 'Examples'], - 'files': ['bin/JURECA_DC/ridft_mpi', 'bin/JURECA_DC/rdgrad_mpi', - 'bin/x86_64-unknown-linux-gnu/ridft', 'DOK/Turbomole_7.4.1.pdf'] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/t/tbb/tbb-2020.3-GCCcore-10.3.0.eb b/Golden_Repo/t/tbb/tbb-2020.3-GCCcore-10.3.0.eb deleted file mode 100644 index bf85fef57496d077c4f2279b8b38bc56fae2fb8f..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/tbb/tbb-2020.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,22 +0,0 @@ -name = 'tbb' -version = '2020.3' - -homepage = 'https://github.com/oneapi-src/oneTBB' -description = """Intel(R) Threading Building Blocks (Intel(R) TBB) lets you easily write parallel C++ programs that - take full advantage of multicore performance, that are portable, composable and have future-proof scalability.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/oneapi-src/oneTBB/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = [ - 'ebc4f6aa47972daed1f7bf71d100ae5bf6931c2e3144cf299c8cc7d041dca2f3' -] - -builddependencies = [ - ('binutils', '2.36.1') -] - -moduleclass = 'lib' diff --git a/Golden_Repo/t/tbb/tbb-2020.3-GCCcore-9.3.0.eb b/Golden_Repo/t/tbb/tbb-2020.3-GCCcore-9.3.0.eb deleted file mode 100644 index 5bf782bb53608b034befe593e73458b4024e5edf..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/tbb/tbb-2020.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,22 +0,0 @@ -name = 'tbb' -version = '2020.3' - -homepage = 'https://github.com/oneapi-src/oneTBB' -description = """Intel(R) Threading Building Blocks (Intel(R) TBB) lets you easily write parallel C++ programs that - take full advantage of multicore performance, that are portable, composable and have future-proof scalability.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/oneapi-src/oneTBB/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = [ - 'ebc4f6aa47972daed1f7bf71d100ae5bf6931c2e3144cf299c8cc7d041dca2f3' -] - -builddependencies = [ - ('binutils', '2.34') -] - -moduleclass = 'lib' diff --git a/Golden_Repo/t/tcsh/tcsh-6.22.02-GCCcore-10.3.0.eb b/Golden_Repo/t/tcsh/tcsh-6.22.02-GCCcore-10.3.0.eb deleted file mode 100644 index e13386548dd0679bcbf73bc37b5ba99b91c652be..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/tcsh/tcsh-6.22.02-GCCcore-10.3.0.eb +++ /dev/null @@ -1,51 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg/Computer Science and Communications Research Unit -# Authors:: Valentin Plugaru <valentin.plugaru@gmail.com> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_05-06.html -## -easyblock = 'ConfigureMake' - -name = 'tcsh' -version = '6.22.02' - -homepage = 'https://www.tcsh.org' -description = """Tcsh is an 
enhanced, but completely compatible version of the Berkeley UNIX C shell (csh). - It is a command language interpreter usable both as an interactive login shell and a shell script command - processor. It includes a command-line editor, programmable word completion, spelling correction, a history - mechanism, job control and a C-like syntax. -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'ftp://ftp.astron.com/pub/%(namelower)s', - 'ftp://ftp.astron.com/pub/%(namelower)s/old', -] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('ncurses', '6.2') -] - -buildopts = 'CFLAGS=-fno-common' - -patches = ['tcsh-6.22.02_gcc10.patch'] - -sanity_check_paths = { - 'files': ["bin/tcsh"], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/t/tcsh/tcsh-6.22.02-GCCcore-9.3.0.eb b/Golden_Repo/t/tcsh/tcsh-6.22.02-GCCcore-9.3.0.eb deleted file mode 100644 index ceb1f02fe628ded1cd643343eb4a78d76b3f966d..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/tcsh/tcsh-6.22.02-GCCcore-9.3.0.eb +++ /dev/null @@ -1,47 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg/Computer Science and Communications Research Unit -# Authors:: Valentin Plugaru <valentin.plugaru@gmail.com> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_05-06.html -## -easyblock = 'ConfigureMake' - -name = 'tcsh' -version = '6.22.02' - -homepage = 'https://www.tcsh.org' -description = """Tcsh is an enhanced, but completely compatible version of the Berkeley UNIX C shell (csh). - It is a command language interpreter usable both as an interactive login shell and a shell script command - processor. It includes a command-line editor, programmable word completion, spelling correction, a history - mechanism, job control and a C-like syntax. 
-""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [ - 'ftp://ftp.astron.com/pub/%(namelower)s', - 'ftp://ftp.astron.com/pub/%(namelower)s/old', -] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('ncurses', '6.2') -] - -sanity_check_paths = { - 'files': ["bin/tcsh"], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/t/tcsh/tcsh-6.22.02_gcc10.patch b/Golden_Repo/t/tcsh/tcsh-6.22.02_gcc10.patch deleted file mode 100644 index c2caf2530ee03ad632574f30cb49e1549af463e8..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/tcsh/tcsh-6.22.02_gcc10.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- tcsh-6.22.02/tc.sig.orig 2021-07-09 18:56:35.226284291 +0200 -+++ tcsh-6.22.02/tc.sig.c 2021-07-09 18:57:20.678131519 +0200 -@@ -56,7 +56,7 @@ - int phup_disabled; /* = 0; */ - int pchild_disabled; /* = 0; */ - int pintr_disabled; /* = 0; */ --int handle_interrupt; /* = 0; */ -+ - - int - handle_pending_signals(void) diff --git a/Golden_Repo/t/texinfo/texinfo-6.7-GCCcore-10.3.0.eb b/Golden_Repo/t/texinfo/texinfo-6.7-GCCcore-10.3.0.eb deleted file mode 100644 index 66e329a7f437e36fc2617df8c0d0c6bd116522c7..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/texinfo/texinfo-6.7-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'texinfo' -version = '6.7' - -homepage = 'https://www.gnu.org/software/texinfo/' -description = """Texinfo is the official documentation format of the GNU project.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('ncurses', '6.2'), - ('texlive', '20200406'), -] - -preinstallopts = "make TEXMF=%(installdir)s/texmf install-tex && " - -# This will overwrite a users $TEXMFHOME so this module is best used as a build dependency -modextravars = {'TEXMFHOME': '%(installdir)s/texmf'} -modloadmsg = "\\n" -modloadmsg += "WARNING: This texinfo module has (re)defined the value for the environment variable \\$TEXMFHOME.\\n" -modloadmsg += "If you use a custom texmf directory (such as ~/texmf) you should copy files found in the\\n" -modloadmsg += "new \\$TEXMFHOME to your custom directory and reset the value of \\$TEXMFHOME to point to that space:\\n" -modloadmsg += "\\tcp -r \\$TEXMFHOME/* /path/to/your/texmf\\n" -modloadmsg += "\\texport TEXMFHOME=/path/to/your/texmf\\n" - -sanity_check_paths = { - 'files': ['bin/info', 'bin/makeinfo', 'bin/pod2texi', 'bin/texi2pdf', 'texmf/tex/texinfo/texinfo.tex'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/t/texinfo/texinfo-6.7-GCCcore-9.3.0.eb b/Golden_Repo/t/texinfo/texinfo-6.7-GCCcore-9.3.0.eb deleted file mode 100644 index 3bf0f70a4452f445269f286367f9b80e5b52e950..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/texinfo/texinfo-6.7-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'texinfo' -version = '6.7' - -homepage = 'https://www.gnu.org/software/texinfo/' -description = """Texinfo is the official documentation format of the GNU project.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -source_urls = [GNU_SOURCE] -sources = [SOURCELOWER_TAR_XZ] - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('ncurses', '6.2'), - 
('texlive', '20200406'), -] - -preinstallopts = "make TEXMF=%(installdir)s/texmf install-tex && " - -# This will overwrite a users $TEXMFHOME so this module is best used as a build dependency -modextravars = {'TEXMFHOME': '%(installdir)s/texmf'} -modloadmsg = "\\n" -modloadmsg += "WARNING: This texinfo module has (re)defined the value for the environment variable \\$TEXMFHOME.\\n" -modloadmsg += "If you use a custom texmf directory (such as ~/texmf) you should copy files found in the\\n" -modloadmsg += "new \\$TEXMFHOME to your custom directory and reset the value of \\$TEXMFHOME to point to that space:\\n" -modloadmsg += "\\tcp -r \\$TEXMFHOME/* /path/to/your/texmf\\n" -modloadmsg += "\\texport TEXMFHOME=/path/to/your/texmf\\n" - -sanity_check_paths = { - 'files': ['bin/info', 'bin/makeinfo', 'bin/pod2texi', 'bin/texi2pdf', 'texmf/tex/texinfo/texinfo.tex'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/t/texlive/texlive-20200406-GCCcore-10.3.0.eb b/Golden_Repo/t/texlive/texlive-20200406-GCCcore-10.3.0.eb deleted file mode 100644 index f66a24070d377bc81f058985259cab6278765642..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/texlive/texlive-20200406-GCCcore-10.3.0.eb +++ /dev/null @@ -1,66 +0,0 @@ -# Based off the 2017 version by John Dey jfdey@fredhutch.org -# https://github.com/easybuilders/easybuild-easyconfigs/pull/5085 -easyblock = 'Tarball' - -name = 'texlive' -version = '20200406' -local_repo = 'ftp://tug.org/historic/systems/texlive/2020/tlnet-final' - -homepage = 'https://tug.org' -description = """TeX is a typesetting language. Instead of visually formatting your text, you enter your manuscript - text intertwined with TeX commands in a plain text file. You then run TeX to produce formatted output, such as a - PDF file. 
Thus, in contrast to standard word processors, your document is a separate file that does not pretend to - be a representation of the final typeset output, and so can be easily edited and manipulated.""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['ftp://tug.org/texlive/historic/2020/'] -sources = [ - { - 'download_filename': 'install-tl-unx.tar.gz', - 'filename': 'install-tl-unx-%(version)s.tar.gz', - } -] -checksums = ['7c90a50e55533d57170cbc7c0370a010019946eb18570282948e1af6f809382d'] - -dependencies = [ - ('X11', '20200222'), - ('libpng', '1.6.37'), - ('OpenGL', '2020'), - ('Perl', '5.32.0'), - ('HarfBuzz', '2.6.7'), - # Poppler, optional - PDF rendering library - # ('poppler', '0.90.1'), - ('cairo', '1.17.2'), - ('fontconfig', '2.13.92'), - ('zlib', '1.2.11'), - # Graphite2, optional - font system for lesser-known languages - # ('graphite2', '1.3.14'), -] - -postinstallcmds = [ - ( - 'echo "TEXDIR %%(installdir)s/" > %%(installdir)s/texlive.profile && ' - 'echo "TEXMFLOCAL %%(installdir)s/texmf-local" >> %%(installdir)s/texlive.profile && ' - 'echo "TEXMFSYSCONFIG %%(installdir)s/texmf-config" >> %%(installdir)s/texlive.profile && ' - 'echo "TEXMFSYSVAR %%(installdir)s/texmf-var" >> %%(installdir)s/texlive.profile && ' - '%%(builddir)s/install-tl-%%(version)s/install-tl -profile %%(installdir)s/texlive.profile ' - '--repository %s' - ) % (local_repo), -] - -modextrapaths = { - 'PATH': 'bin/x86_64-linux', - 'INFOPATH': 'texmf-dist/doc/info', - 'MANPATH': 'texmf-dist/doc/man', -} -modextravars = { - 'TEXMFHOME': '%(installdir)s/texmf-dist' -} - -sanity_check_paths = { - 'files': ['bin/x86_64-linux/tex', 'bin/x86_64-linux/latex'], - 'dirs': ['bin/x86_64-linux', 'texmf-dist'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/t/texlive/texlive-20200406-GCCcore-9.3.0.eb b/Golden_Repo/t/texlive/texlive-20200406-GCCcore-9.3.0.eb deleted file mode 100644 index 0cec48d8d1e4b5d21d5dbde76936943d1de9221d..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/texlive/texlive-20200406-GCCcore-9.3.0.eb +++ /dev/null @@ -1,66 +0,0 @@ -# Based off the 2017 version by John Dey jfdey@fredhutch.org -# https://github.com/easybuilders/easybuild-easyconfigs/pull/5085 -easyblock = 'Tarball' - -name = 'texlive' -version = '20200406' -local_repo = 'ftp://tug.org/historic/systems/texlive/2020/tlnet-final' - -homepage = 'https://tug.org' -description = """TeX is a typesetting language. Instead of visually formatting your text, you enter your manuscript - text intertwined with TeX commands in a plain text file. You then run TeX to produce formatted output, such as a - PDF file. 
Thus, in contrast to standard word processors, your document is a separate file that does not pretend to - be a representation of the final typeset output, and so can be easily edited and manipulated.""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['ftp://tug.org/texlive/historic/2020/'] -sources = [ - { - 'download_filename': 'install-tl-unx.tar.gz', - 'filename': 'install-tl-unx-%(version)s.tar.gz', - } -] -checksums = ['7c90a50e55533d57170cbc7c0370a010019946eb18570282948e1af6f809382d'] - -dependencies = [ - ('X11', '20200222'), - ('libpng', '1.6.37'), - ('OpenGL', '2020'), - ('Perl', '5.32.0'), - ('HarfBuzz', '2.6.7'), - # Poppler, optional - PDF rendering library - # ('poppler', '0.90.1'), - ('cairo', '1.17.2'), - ('fontconfig', '2.13.92'), - ('zlib', '1.2.11'), - # Graphite2, optional - font system for lesser-known languages - # ('graphite2', '1.3.14'), -] - -postinstallcmds = [ - ( - 'echo "TEXDIR %%(installdir)s/" > %%(installdir)s/texlive.profile && ' - 'echo "TEXMFLOCAL %%(installdir)s/texmf-local" >> %%(installdir)s/texlive.profile && ' - 'echo "TEXMFSYSCONFIG %%(installdir)s/texmf-config" >> %%(installdir)s/texlive.profile && ' - 'echo "TEXMFSYSVAR %%(installdir)s/texmf-var" >> %%(installdir)s/texlive.profile && ' - '%%(builddir)s/install-tl-%%(version)s/install-tl -profile %%(installdir)s/texlive.profile ' - '--repository %s' - ) % (local_repo), -] - -modextrapaths = { - 'PATH': 'bin/x86_64-linux', - 'INFOPATH': 'texmf-dist/doc/info', - 'MANPATH': 'texmf-dist/doc/man', -} -modextravars = { - 'TEXMFHOME': '%(installdir)s/texmf-dist' -} - -sanity_check_paths = { - 'files': ['bin/x86_64-linux/tex', 'bin/x86_64-linux/latex'], - 'dirs': ['bin/x86_64-linux', 'texmf-dist'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/t/tmux/tmux-3.1b-GCCcore-10.3.0.eb b/Golden_Repo/t/tmux/tmux-3.1b-GCCcore-10.3.0.eb deleted file mode 100644 index c08a1ef191a1db5b0a62cad234537d61623b1413..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/tmux/tmux-3.1b-GCCcore-10.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'tmux' -version = '3.1b' - -homepage = 'http://tmux.sourceforge.net/' -description = """tmux is a terminal multiplexer. It lets you switch easily between several programs in one terminal, -detach them (they keep running in the background) and reattach them to a different terminal. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = ['https://github.com/%(name)s/%(name)s/releases/download/%(version)s/'] - -dependencies = [ - ('ncurses', '6.2'), - ('libevent', '2.1.12'), -] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -sanity_check_paths = { - 'files': ['bin/tmux'], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/t/tmux/tmux-3.1b-GCCcore-9.3.0.eb b/Golden_Repo/t/tmux/tmux-3.1b-GCCcore-9.3.0.eb deleted file mode 100644 index 13740e9daa9e24db9db43a4c6e6de5ba76860d39..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/tmux/tmux-3.1b-GCCcore-9.3.0.eb +++ /dev/null @@ -1,32 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'tmux' -version = '3.1b' - -homepage = 'http://tmux.sourceforge.net/' -description = """tmux is a terminal multiplexer. It lets you switch easily between several programs in one terminal, -detach them (they keep running in the background) and reattach them to a different terminal. 
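The texlive postinstallcmds above double every percent sign in the EasyBuild placeholders (%%(installdir)s): the command string is first passed through plain Python %-formatting to splice in local_repo, which collapses %% to %, and only afterwards does EasyBuild expand the surviving %(installdir)s template. A minimal sketch of that escaping, assuming nothing beyond standard Python string formatting (the echo command is shortened for the example):

local_repo = 'ftp://tug.org/historic/systems/texlive/2020/tlnet-final'
cmd = ('echo "TEXDIR %%(installdir)s/" > %%(installdir)s/texlive.profile '
       '--repository %s') % local_repo
print(cmd)
# echo "TEXDIR %(installdir)s/" > %(installdir)s/texlive.profile --repository ftp://tug.org/historic/systems/texlive/2020/tlnet-final
# The remaining %(installdir)s is what EasyBuild later resolves to the actual install prefix.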
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCE_TAR_GZ] -source_urls = ['https://github.com/%(name)s/%(name)s/releases/download/%(version)s/'] - -dependencies = [ - ('ncurses', '6.2'), - ('libevent', '2.1.12'), -] - -builddependencies = [ - ('binutils', '2.34'), -] - -sanity_check_paths = { - 'files': ['bin/tmux'], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/t/torchaudio/torchaudio-0.7.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/t/torchaudio/torchaudio-0.7.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 63cd0885e697808dfc24a73bdbfdb6f89fc32613..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/torchaudio/torchaudio-0.7.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'torchaudio' -version = '0.7.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/pytorch/audio' -description = """ Data manipulation and transformation for audio signal -processing, powered by PyTorch """ - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://github.com/pytorch/audio/archive'] -sources = ['v%(version)s.tar.gz'] - - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('PyTorch', '1.7.0', versionsuffix), - ('SoX', '14.4.2'), -] - -download_dep_fail = True -use_pip = True -sanity_pip_check = True - -moduleclass = 'tools' diff --git a/Golden_Repo/t/torchvision/torchvision-0.8.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/t/torchvision/torchvision-0.8.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 5addb164637cd8cf6b9759f2acd30037d7b80b53..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/torchvision/torchvision-0.8.2-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'torchvision' -version = '0.8.2' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://pytorch.org/' -description = """The torchvision package consists of popular datasets, model architectures, and common image -transformations for computer vision.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -download_dep_fail = True - -source_urls = [ - 'https://github.com/pytorch/vision/archive', -] -sources = [ - 'v%(version)s.tar.gz' -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), - ('PyTorch', '1.7.0', versionsuffix), - ('Pillow-SIMD', '7.0.0.post3', versionsuffix), -] - -options = {'modulename': 'torchvision'} - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -# Add a property -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'devel' diff --git a/Golden_Repo/t/trimesh/trimesh-3.8.11-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/t/trimesh/trimesh-3.8.11-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index ccedce421a3fe077761ecadd023e3b5fa80cb7c8..0000000000000000000000000000000000000000 --- a/Golden_Repo/t/trimesh/trimesh-3.8.11-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'trimesh' -version = '3.8.11' -versionsuffix = '-Python-%(pyver)s' - -homepage = 
'https://trimsh.org/' -description = """Trimesh is a Python (2.7- 3.3+) library for loading and using triangular meshes with an emphasis on -watertight meshes. The goal of the library is to provide a fully featured Trimesh object which allows for easy -manipulation and analysis, in the style of the excellent Polygon object in the Shapely library.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -source_urls = [PYPI_SOURCE] -sources = [SOURCE_TAR_GZ] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), # numpy required -] - -sanity_check_paths = { - 'files': [], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/u/UCX/UCX-1.10.1.eb b/Golden_Repo/u/UCX/UCX-1.10.1.eb deleted file mode 100644 index 9e9019f50f1b862a15738c6e77c48780ae972e38..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UCX/UCX-1.10.1.eb +++ /dev/null @@ -1,75 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.10.1' - -homepage = 'https://www.openucx.org' -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] -checksums = ['ae9a108af6842ca135e7ec9b6131469adf9f1e50f899349fafcc69a215368bc9'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.14'), - ('CUDA', '11.3'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -# Enable machine-specific optimizations, default: NO -configopts += '--enable-optimizations ' -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -# Enable thread support in UCP and UCT, default: NO -configopts += '--enable-mt ' -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -# Compile with IB Reliable Connection support -configopts += '--with-rc ' -# Compile with IB Unreliable Datagram support -configopts += '--with-ud ' -# Compile with IB Dynamic Connection support -configopts += '--with-dc ' -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -configopts += '--without-cm ' # Compile without IB Connection Manager support - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Golden_Repo/u/UCX/UCX-1.8.1.eb b/Golden_Repo/u/UCX/UCX-1.8.1.eb deleted file mode 100644 index 
985e297badf1ca9b5c749a664f1b964beca7ad53..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UCX/UCX-1.8.1.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.8.1' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -# configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-cm ' # Compile with IB Connection Manager support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Golden_Repo/u/UCX/UCX-1.9.0.eb b/Golden_Repo/u/UCX/UCX-1.9.0.eb deleted file mode 100644 index 445a48c4a9065650b630c153d8a72162c268a9d5..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UCX/UCX-1.9.0.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.9.0' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += 
'--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-cm ' # Compile with IB Connection Manager support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCC-10.3.0.eb b/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCC-10.3.0.eb deleted file mode 100644 index d334f43d9b36cf59f6c6a76a49edc06b117bf089..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCC-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg, Ghent University -# Authors:: Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste (Ghent University) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## - -easyblock = 'ConfigureMake' - -name = 'UDUNITS' -version = '2.2.26' - -homepage = 'http://www.unidata.ucar.edu/software/udunits/' -description = """UDUNITS supports conversion of unit specifications between formatted and binary forms, - arithmetic manipulation of units, and conversion of values between compatible scales of measurement. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['ftp://ftp.unidata.ucar.edu/pub/udunits'] - -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/udunits2', 'include/converter.h', 'include/udunits2.h', 'include/udunits.h', - 'lib/libudunits2.a', 'lib/libudunits2.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -parallel = 1 - -moduleclass = 'phys' diff --git a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCC-9.3.0.eb b/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCC-9.3.0.eb deleted file mode 100644 index 99a9d5549b79f5c7a2531afaff892692fabc5175..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCC-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg, Ghent University -# Authors:: Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste (Ghent University) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## - -easyblock = 'ConfigureMake' - -name = 'UDUNITS' -version = '2.2.26' - -homepage = 'http://www.unidata.ucar.edu/software/udunits/' -description = """UDUNITS supports conversion of unit specifications between formatted and binary forms, - arithmetic manipulation of units, and conversion of values between compatible scales of measurement. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['ftp://ftp.unidata.ucar.edu/pub/udunits'] - -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/udunits2', 'include/converter.h', 'include/udunits2.h', 'include/udunits.h', - 'lib/libudunits2.a', 'lib/libudunits2.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -parallel = 1 - -moduleclass = 'phys' diff --git a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCCcore-10.3.0.eb b/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCCcore-10.3.0.eb deleted file mode 100644 index 3fcd11aa4f267c3c5f4b089c0bc3b5d9f405a1c5..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCCcore-10.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg, Ghent University -# Authors:: Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste (Ghent University) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## - -easyblock = 'ConfigureMake' - -name = 'UDUNITS' -version = '2.2.26' - -homepage = 'http://www.unidata.ucar.edu/software/udunits/' -description = """UDUNITS supports conversion of unit specifications between formatted and binary forms, - arithmetic manipulation of units, and conversion of values between compatible scales of measurement. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['ftp://ftp.unidata.ucar.edu/pub/udunits'] - -builddependencies = [('binutils', '2.36.1')] -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/udunits2', 'include/converter.h', 'include/udunits2.h', 'include/udunits.h', - 'lib/libudunits2.a', 'lib/libudunits2.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -parallel = 1 - -moduleclass = 'phys' diff --git a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCCcore-9.3.0.eb b/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCCcore-9.3.0.eb deleted file mode 100644 index eb012fc637df01c285b2f77756b239865731ed90..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-GCCcore-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg, Ghent University -# Authors:: Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste (Ghent University) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## - -easyblock = 'ConfigureMake' - -name = 'UDUNITS' -version = '2.2.26' - -homepage = 'http://www.unidata.ucar.edu/software/udunits/' -description = """UDUNITS supports conversion of unit specifications between formatted and binary forms, - arithmetic manipulation of units, and conversion of values between compatible scales of measurement. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['ftp://ftp.unidata.ucar.edu/pub/udunits'] - -builddependencies = [('binutils', '2.34')] -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/udunits2', 'include/converter.h', 'include/udunits2.h', 'include/udunits.h', - 'lib/libudunits2.a', 'lib/libudunits2.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -parallel = 1 - -moduleclass = 'phys' diff --git a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-iccifort-2020.2.254-GCC-9.3.0.eb b/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index cda184debca0e301422072f087152f9d3a4bad46..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg, Ghent University -# Authors:: Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste (Ghent University) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## - -easyblock = 'ConfigureMake' - -name = 'UDUNITS' -version = '2.2.26' - -homepage = 'http://www.unidata.ucar.edu/software/udunits/' -description = """UDUNITS supports conversion of unit specifications between formatted and binary forms, - arithmetic manipulation of units, and conversion of values between compatible scales of measurement. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['ftp://ftp.unidata.ucar.edu/pub/udunits'] - -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/udunits2', 'include/converter.h', 'include/udunits2.h', 'include/udunits.h', - 'lib/libudunits2.a', 'lib/libudunits2.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -parallel = 1 - -moduleclass = 'phys' diff --git a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 18f6eac852a84603a232982302dddffc2a10e373..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/UDUNITS/UDUNITS-2.2.26-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -## -# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild -# -# Copyright:: Copyright 2012-2013 University of Luxembourg, Ghent University -# Authors:: Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste (Ghent University) -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of the policy: -# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-97.html -## - -easyblock = 'ConfigureMake' - -name = 'UDUNITS' -version = '2.2.26' - -homepage = 'http://www.unidata.ucar.edu/software/udunits/' -description = """UDUNITS supports conversion of unit specifications between formatted and binary forms, - arithmetic manipulation of units, and conversion of values between compatible scales of measurement. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['ftp://ftp.unidata.ucar.edu/pub/udunits'] - -dependencies = [('expat', '2.2.9')] - -sanity_check_paths = { - 'files': ['bin/udunits2', 'include/converter.h', 'include/udunits2.h', 'include/udunits.h', - 'lib/libudunits2.a', 'lib/libudunits2.%s' % SHLIB_EXT], - 'dirs': ['share'], -} - -parallel = 1 - -moduleclass = 'phys' diff --git a/Golden_Repo/u/uftp/uftp-1.4.1.eb b/Golden_Repo/u/uftp/uftp-1.4.1.eb deleted file mode 100644 index 257e71827c282790407cfe2c298b5a07f5f9059d..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/uftp/uftp-1.4.1.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'Tarball' - -name = 'uftp' -version = '1.4.1' - -homepage = 'https://unicore-dev.zam.kfa-juelich.de/documentation/uftpclient-%(version)s/uftpclient-manual.html' -description = """The UFTP standalone client provides high-performance file transfer.""" -site_contacts = 'b.schuller@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [ - 'https://master.dl.sourceforge.net/project/unicore/Clients/UFTP-Client/%(version)s/'] -sources = ['uftp-client-%(version)s-all.zip'] -checksums = ['85a20315f205dbd452729cf0dfe5b4b861988b5eca1726cbf81391a98c8326cc'] - -dependencies = [ - ('Java', '15'), -] - -postinstallcmds = [ - 'chmod +x %(installdir)s/bin/uftp', -] - -modextravars = { - 'UFTP_SHARE_URL': 'https://uftp.fz-juelich.de:7112/UFTP_Auth/rest/share/JUDAC', - 'UFTP_JUDAC': 'https://uftp.fz-juelich.de:7112/UFTP_Auth/rest/auth/JUDAC:' -} - -sanity_check_paths = { - 'files': ['bin/uftp'], - 'dirs': ['bin'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/u/uglifyjs/uglifyjs-3.11.4-GCCcore-10.3.0.eb 
b/Golden_Repo/u/uglifyjs/uglifyjs-3.11.4-GCCcore-10.3.0.eb deleted file mode 100644 index 2795a76df5f9fdeaad6912ec6923b16f055e83b7..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/uglifyjs/uglifyjs-3.11.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'Binary' - -name = 'uglifyjs' -version = '3.11.4' - -homepage = 'https://github.com/mishoo/UglifyJS' -description = """UglifyJS is a JavaScript parser, minifier, compressor and beautifier toolkit.""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'j.goebbert@fz-juelich.de' - -source_urls = ['https://github.com/mishoo/UglifyJS/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['9220bf2c4b11ba225d49a8fecf161501802246f16c00d6b0b0a49ab06fc90b02'] - -builddependencies = [('binutils', '2.36.1')] - -dependencies = [ - ('nodejs', '12.19.0'), -] - -install_cmd = 'npm install --no-package-lock -g --prefix %(installdir)s uglify-js@%(version)s v%(version)s.tar.gz' - -sanity_check_paths = { - 'files': ['bin/%(namelower)s'], - 'dirs': ['lib/node_modules/uglify-js'], -} -sanity_check_commands = ['%(namelower)s --help'] - -moduleclass = 'vis' diff --git a/Golden_Repo/u/uglifyjs/uglifyjs-3.11.4-GCCcore-9.3.0.eb b/Golden_Repo/u/uglifyjs/uglifyjs-3.11.4-GCCcore-9.3.0.eb deleted file mode 100644 index 9e483332372855c37f319c94bff57adfb45b0015..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/uglifyjs/uglifyjs-3.11.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'Binary' - -name = 'uglifyjs' -version = '3.11.4' - -homepage = 'https://github.com/mishoo/UglifyJS' -description = """UglifyJS is a JavaScript parser, minifier, compressor and beautifier toolkit.""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'j.goebbert@fz-juelich.de' - -source_urls = ['https://github.com/mishoo/UglifyJS/archive/'] -sources = ['v%(version)s.tar.gz'] -checksums = ['9220bf2c4b11ba225d49a8fecf161501802246f16c00d6b0b0a49ab06fc90b02'] - -builddependencies = [('binutils', '2.34')] - -dependencies = [ - ('nodejs', '12.19.0'), -] - -install_cmd = 'npm install --no-package-lock -g --prefix %(installdir)s uglify-js@%(version)s v%(version)s.tar.gz' - -sanity_check_paths = { - 'files': ['bin/%(namelower)s'], - 'dirs': ['lib/node_modules/uglify-js'], -} -sanity_check_commands = ['%(namelower)s --help'] - -moduleclass = 'vis' diff --git a/Golden_Repo/u/unzip/unzip-6.0-GCCcore-10.3.0.eb b/Golden_Repo/u/unzip/unzip-6.0-GCCcore-10.3.0.eb deleted file mode 100644 index 86d1961b5db80ae53613c3cfcde27b25d0703573..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/unzip/unzip-6.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'unzip' -version = '6.0' - -homepage = 'http://www.info-zip.org/' -description = """UnZip is an extraction utility for archives compressed in -.zip format (also called "zipfiles") -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['ftp://ftp.info-zip.org/pub/infozip/src/'] -sources = ['%(name)s%(version_major)s%(version_minor)s.tgz'] - -skipsteps = ['configure'] - -builddependencies = [ - ('binutils', '2.36.1') -] - -prebuildopts = 'ln -s unix/Makefile && ' - -buildopts = 'generic CC=$CC' - -installopts = 'prefix=%(installdir)s' - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["unzip"]], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/u/unzip/unzip-6.0-GCCcore-9.3.0.eb b/Golden_Repo/u/unzip/unzip-6.0-GCCcore-9.3.0.eb 
deleted file mode 100644 index c4965b59696bdabfb15163f55beff4cee8d9ac93..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/unzip/unzip-6.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'unzip' -version = '6.0' - -homepage = 'http://www.info-zip.org/' -description = """UnZip is an extraction utility for archives compressed in -.zip format (also called "zipfiles") -""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['ftp://ftp.info-zip.org/pub/infozip/src/'] -sources = ['%(name)s%(version_major)s%(version_minor)s.tgz'] - -skipsteps = ['configure'] - -builddependencies = [ - ('binutils', '2.34') -] - -prebuildopts = 'ln -s unix/Makefile && ' - -buildopts = 'generic CC=$CC' - -installopts = 'prefix=%(installdir)s' - -sanity_check_paths = { - 'files': ["bin/%s" % x for x in ["unzip"]], - 'dirs': [], -} - -moduleclass = 'data' diff --git a/Golden_Repo/u/util-linux/util-linux-2.36-GCCcore-10.3.0.eb b/Golden_Repo/u/util-linux/util-linux-2.36-GCCcore-10.3.0.eb deleted file mode 100644 index 19835cd36cf8fd09ea6d6bd8278faa70ab92781e..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/util-linux/util-linux-2.36-GCCcore-10.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'util-linux' -version = '2.36' - -homepage = 'http://www.kernel.org/pub/linux/utils/util-linux' -description = "Set of Linux utilities" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['%s/v%%(version_major_minor)s' % homepage] -sources = [SOURCELOWER_TAR_GZ] - -# disable account related utilities (they need OS dependant pam-devel files) -# disable wall and friends (requires group changing permissions for install user) -# install systemd service files in install dir -# install bash completion files in install dir -configopts = "--disable-chfn-chsh --disable-login --disable-su --disable-rfkill " -configopts += "--disable-wall --disable-use-tty-group " -configopts += "--disable-makeinstall-chown --disable-makeinstall-setuid " -configopts += "--with-systemdsystemunitdir='${prefix}/systemd' " -configopts += "--with-bashcompletiondir='${prefix}/share/bash-completion/completions' " -# disable building Python bindings (since we don't include Python as a dep) -configopts += "--without-python " - -builddependencies = [ - ('binutils', '2.36.1'), -] - -dependencies = [ - ('ncurses', '6.2'), - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ['lib/lib%s.a' % x for x in ['blkid', 'mount', 'uuid']], - 'dirs': ['include', 'bin', 'share', 'sbin'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/u/util-linux/util-linux-2.36-GCCcore-9.3.0.eb b/Golden_Repo/u/util-linux/util-linux-2.36-GCCcore-9.3.0.eb deleted file mode 100644 index bc070c61c66002cd0defc0ae280d8a0d4f97a07c..0000000000000000000000000000000000000000 --- a/Golden_Repo/u/util-linux/util-linux-2.36-GCCcore-9.3.0.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'util-linux' -version = '2.36' - -homepage = 'http://www.kernel.org/pub/linux/utils/util-linux' -description = "Set of Linux utilities" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['%s/v%%(version_major_minor)s' % homepage] -sources = [SOURCELOWER_TAR_GZ] - -# disable account related utilities (they need OS dependant pam-devel files) -# 
disable wall and friends (requires group changing permissions for install user) -# install systemd service files in install dir -# install bash completion files in install dir -configopts = "--disable-chfn-chsh --disable-login --disable-su --disable-rfkill " -configopts += "--disable-wall --disable-use-tty-group " -configopts += "--disable-makeinstall-chown --disable-makeinstall-setuid " -configopts += "--with-systemdsystemunitdir='${prefix}/systemd' " -configopts += "--with-bashcompletiondir='${prefix}/share/bash-completion/completions' " -# disable building Python bindings (since we don't include Python as a dep) -configopts += "--without-python " - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('ncurses', '6.2'), - ('zlib', '1.2.11'), -] - -sanity_check_paths = { - 'files': ['lib/lib%s.a' % x for x in ['blkid', 'mount', 'uuid']], - 'dirs': ['include', 'bin', 'share', 'sbin'], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/v/VMD/VMD-1.9.3_plugins.patch b/Golden_Repo/v/VMD/VMD-1.9.3_plugins.patch deleted file mode 100644 index f4cf63549263fd9462ccfefa556b6508369e9a25..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.3_plugins.patch +++ /dev/null @@ -1,29 +0,0 @@ -Fix hard coded compiler, flags and tcl lib version for plugins - -Ake Sandgren, 20190823 ---- plugins/Make-arch.orig 2016-10-21 23:34:39.000000000 +0200 -+++ plugins/Make-arch 2019-08-23 10:45:51.403545042 +0200 -@@ -337,17 +337,17 @@ - "ARCH = LINUXAMD64" \ - "COPTO = -fPIC -m64 -o " \ - "LOPTO = -fPIC -m64 -lstdc++ -o " \ -- "CC = gcc" \ -- "CXX = g++" \ -+ "CC = $(CC)" \ -+ "CXX = $(CXX)" \ - "DEF = -D" \ -- "CCFLAGS = -m64 -O2 -fPIC -Wall" \ -- "CXXFLAGS = -m64 -O2 -fPIC -Wall" \ -- "TCLLDFLAGS = -ltcl8.5 -ldl" \ -+ "CCFLAGS = $(CFLAGS)" \ -+ "CXXFLAGS = $(CXXFLAGS)" \ -+ "TCLLDFLAGS = $(TCLLDFLAGS)" \ - "NETCDFLDFLAGS = -lnetcdf " \ - "AR = ar" \ - "NM = nm -p" \ - "RANLIB = touch" \ -- "SHLD = gcc -shared" -+ "SHLD = $(CC) -shared" - - LINUXCARMA: - $(MAKE) dynlibs staticlibs bins \ diff --git a/Golden_Repo/v/VMD/VMD-1.9.3_stride_MAX_AT_IN_RES.patch b/Golden_Repo/v/VMD/VMD-1.9.3_stride_MAX_AT_IN_RES.patch deleted file mode 100644 index 9011384eff52af2075f6d7977d0b0de7e5bad400..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.3_stride_MAX_AT_IN_RES.patch +++ /dev/null @@ -1,15 +0,0 @@ -Increase number of atoms allowed per residues as per stride README from VMD - -Åke Sandgren, 2017-05-02 -diff -ru vmd-1.9.3.orig/lib/stride/stride.h vmd-1.9.3/lib/stride/stride.h ---- vmd-1.9.3.orig/lib/stride/stride.h 2017-05-02 13:47:26.484463970 +0200 -+++ vmd-1.9.3/lib/stride/stride.h 2017-05-02 13:47:43.748279797 +0200 -@@ -40,7 +40,7 @@ - #define MAX_BOND 100 - #define MAX_ASSIGN 500 - #define MAX_INFO 1000 --#define MAX_AT_IN_RES 75 -+#define MAX_AT_IN_RES 100 - #define MAX_AT_IN_HETERORES 200 - #define MAXRESDNR 6 - #define MAXRESACC 6 diff --git a/Golden_Repo/v/VMD/VMD-1.9.3_stride_Makefile.patch b/Golden_Repo/v/VMD/VMD-1.9.3_stride_Makefile.patch deleted file mode 100644 index 036430db8ffe77867d6e4ebf20fc57e422ff95f3..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.3_stride_Makefile.patch +++ /dev/null @@ -1,37 +0,0 @@ -Make stride use CC, CFLAGS and LDFLAGS from EB. 
- -Åke Sandgren, 2017-05-02 -diff -ru vmd-1.9.3.orig/lib/stride/Makefile vmd-1.9.3/lib/stride/Makefile ---- vmd-1.9.3.orig/lib/stride/Makefile 2003-04-08 14:03:14.000000000 +0200 -+++ vmd-1.9.3/lib/stride/Makefile 2017-05-02 13:46:01.973365383 +0200 -@@ -1,13 +1,14 @@ - #FLAGS = -lm -L/usr/pub/lib -lefence -o - #CC = cc -O2 -fullwarn -TENV:large_GOT - #CC = cc -g -Wall --CC = gcc -O2 # at least for SunOS -+#CC = gcc -O2 # at least for SunOS - #CC = cc -g - - #CC = cc -O2 -fullwarn - - #CC = cc -O2 --FLAGS = -lm -o -+#FLAGS = -lm -o -+LIBS = -lm - - SOURCE = stride.c splitstr.c rdpdb.c initchn.c geometry.c thr2one.c one2thr.c filename.c tolostr.c strutil.c place_h.c hbenergy.c memory.c helix.c sheet.c rdmap.c phipsi.c command.c molscr.c die.c hydrbond.c mergepat.c fillasn.c escape.c p_jrnl.c p_rem.c p_atom.c p_helix.c p_sheet.c p_turn.c p_ssbond.c p_expdta.c p_model.c p_compnd.c report.c nsc.c area.c ssbond.c chk_res.c chk_atom.c turn.c pdbasn.c dssp.c outseq.c chkchain.c elem.c measure.c asngener.c p_endmdl.c stred.c contact_order.c contact_map.c - -@@ -15,12 +16,9 @@ - - BINDIR = . - --.c.o: -- $(CC) -c $< -o $@ -- - - stride : $(OBJECT) -- $(CC) $(OBJECT) $(FLAGS) $(BINDIR)/stride${ARCH} -+ $(CC) $(LDFLAGS) $(OBJECT) $(LIBS) -o stride - - $(OBJECT) : stride.h protot.h - diff --git a/Golden_Repo/v/VMD/VMD-1.9.3_surf_Makefile.patch b/Golden_Repo/v/VMD/VMD-1.9.3_surf_Makefile.patch deleted file mode 100644 index 93f430a64a158b7c7ca07cfee06a7e00b5251307..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.3_surf_Makefile.patch +++ /dev/null @@ -1,112 +0,0 @@ -Fix surf Makefile. -Use CC, CFLAGS, etc from EB. -Drop bad make depend lines. - -Åke Sandgren, 2017-05-02 -diff -ru vmd-1.9.3.orig/lib/surf/Makefile vmd-1.9.3/lib/surf/Makefile ---- vmd-1.9.3.orig/lib/surf/Makefile 1994-03-22 16:44:20.000000000 +0100 -+++ vmd-1.9.3/lib/surf/Makefile 2017-05-02 13:41:51.911991381 +0200 -@@ -1,12 +1,7 @@ - # Compilation flags --#CC = cc --CC = cc - INCLUDE = -I. --#LINCLUDE = -lcurses -ltermcap -lm --LINCLUDE = -lm --OPT_CFLAGS = -O2 $(FLAGS) $(INCLUDE) --#CFLAGS = -g $(FLAGS) $(INCLUDE) --CFLAGS = -O2 $(FLAGS) $(INCLUDE) -+LIBS = -lm -+CFLAGS = $(OPT) $(INCLUDE) - - # These are the user object files in the application - SRCS = surf.c io.c compute.c dual.c utils.c lp.c chull.c tessel_cases.c \ -@@ -18,7 +13,7 @@ - - # make objects - surf: $(OBJS) Makefile -- $(CC) $(CFLAGS) $(OBJS) -o surf $(LINCLUDE) -+ $(CC) $(LDFLAGS) $(OBJS) -o surf $(LIBS) - - lint: - lint $(INCLUDE) $(SRCS) -@@ -30,9 +25,6 @@ - tar -cvf surf.tar README *.[hc] Makefile - compress surf.tar - --.c.o: -- $(CC) $(CFLAGS) -c $*.c -- - - # make depend makes the proper include file dependencies. You _could_ run - # it on a sun4, but there's a bug in the SunOS version of sed that causes -@@ -61,48 +53,3 @@ - @ echo ' ' >> Makefile - - # DO NOT DELETE THIS LINE -- make depend depends on it. -- -- --# DO NOT DELETE THIS LINE -- make depend depends on it. 
-- --surf.o: surf.h /usr/include/stdio.h /usr/include/math.h /usr/include/stdlib.h --surf.o: /usr/include/sgidefs.h /usr/include/string.h /usr/include/sys/time.h --surf.o: linalg.h --io.o: surf.h /usr/include/stdio.h /usr/include/math.h /usr/include/stdlib.h --io.o: /usr/include/sgidefs.h /usr/include/string.h /usr/include/sys/time.h --io.o: linalg.h --compute.o: surf.h /usr/include/stdio.h /usr/include/math.h --compute.o: /usr/include/stdlib.h /usr/include/sgidefs.h /usr/include/string.h --compute.o: /usr/include/sys/time.h linalg.h chull.h dual.h --dual.o: surf.h /usr/include/stdio.h /usr/include/math.h /usr/include/stdlib.h --dual.o: /usr/include/sgidefs.h /usr/include/string.h /usr/include/sys/time.h --dual.o: linalg.h dual.h chull.h --utils.o: surf.h /usr/include/stdio.h /usr/include/math.h --utils.o: /usr/include/stdlib.h /usr/include/sgidefs.h /usr/include/string.h --utils.o: /usr/include/sys/time.h linalg.h --lp.o: surf.h /usr/include/stdio.h /usr/include/math.h /usr/include/stdlib.h --lp.o: /usr/include/sgidefs.h /usr/include/string.h /usr/include/sys/time.h --lp.o: linalg.h --chull.o: surf.h /usr/include/stdio.h /usr/include/math.h --chull.o: /usr/include/stdlib.h /usr/include/sgidefs.h /usr/include/string.h --chull.o: /usr/include/sys/time.h linalg.h chull.h --tessel_cases.o: surf.h /usr/include/stdio.h /usr/include/math.h --tessel_cases.o: /usr/include/stdlib.h /usr/include/sgidefs.h --tessel_cases.o: /usr/include/string.h /usr/include/sys/time.h linalg.h dual.h --tessel_patches.o: surf.h /usr/include/stdio.h /usr/include/math.h --tessel_patches.o: /usr/include/stdlib.h /usr/include/sgidefs.h --tessel_patches.o: /usr/include/string.h /usr/include/sys/time.h linalg.h --tessel_convex.o: surf.h /usr/include/stdio.h /usr/include/math.h --tessel_convex.o: /usr/include/stdlib.h /usr/include/sgidefs.h --tessel_convex.o: /usr/include/string.h /usr/include/sys/time.h linalg.h --tessel_concave.o: surf.h /usr/include/stdio.h /usr/include/math.h --tessel_concave.o: /usr/include/stdlib.h /usr/include/sgidefs.h --tessel_concave.o: /usr/include/string.h /usr/include/sys/time.h linalg.h --tessel_torus.o: surf.h /usr/include/stdio.h /usr/include/math.h --tessel_torus.o: /usr/include/stdlib.h /usr/include/sgidefs.h --tessel_torus.o: /usr/include/string.h /usr/include/sys/time.h linalg.h -- --# DEPENDENCIES MUST END AT END OF FILE --# IF YOU PUT STUFF HERE IT WILL GO AWAY --# see make depend above -- -diff -ru vmd-1.9.3.orig/lib/surf/surf.c vmd-1.9.3/lib/surf/surf.c ---- vmd-1.9.3.orig/lib/surf/surf.c 1994-03-21 10:33:00.000000000 +0100 -+++ vmd-1.9.3/lib/surf/surf.c 2017-05-02 13:41:51.911991381 +0200 -@@ -7,7 +7,7 @@ - #define EXTERN - #include "surf.h" - --void -+int - main(ac,av) - int ac; - char* av[]; -@@ -56,6 +56,8 @@ - if (Write_Option == 2) output_dataset(); - - if (Write_Option) end_output_dataset(); -+ -+ return(0); - } - - diff --git a/Golden_Repo/v/VMD/VMD-1.9.3_surf_bad_printfs.patch b/Golden_Repo/v/VMD/VMD-1.9.3_surf_bad_printfs.patch deleted file mode 100644 index 9b9db3889bf08c48fb6a469303e1db1423d2b734..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.3_surf_bad_printfs.patch +++ /dev/null @@ -1,74 +0,0 @@ -Fix some bad printfs in surf. 
- -Åke Sandgren, 2017-05-02 -diff -ru vmd-1.9.3.orig/lib/surf/chull.c vmd-1.9.3/lib/surf/chull.c ---- vmd-1.9.3.orig/lib/surf/chull.c 1994-03-19 06:50:54.000000000 +0100 -+++ vmd-1.9.3/lib/surf/chull.c 2017-05-02 13:44:07.046582827 +0200 -@@ -378,7 +378,7 @@ - print_out( v ) - struct tvertex *v; - { -- fprintf( stderr, "\nAdding vertex %6x :\n", v ); -+ fprintf( stderr, "\nAdding vertex %6p :\n", v ); - print_verts(); - print_edges(); - print_fs(); -@@ -398,11 +398,11 @@ - temp = vertices; - fprintf (stderr, "Vertex List\n"); - if (vertices) do { -- fprintf(stderr," addr %6x\t", vertices ); -+ fprintf(stderr," addr %6p\t", vertices ); - fprintf(stderr,"(%g,%g,%g)",vertices->v[X], - vertices->v[Y], vertices->v[Z] ); - fprintf(stderr," active:%3d", vertices->active ); -- fprintf(stderr," duplicate:%5x", vertices->duplicate ); -+ fprintf(stderr," duplicate:%5p", vertices->duplicate ); - fprintf(stderr," mark:%2d\n", vertices->mark ); - vertices = vertices->next; - } while ( vertices != temp ); -@@ -424,13 +424,13 @@ - temp = edges; - fprintf (stderr, "Edge List\n"); - if (edges) do { -- fprintf( stderr, " addr: %6x\t", edges ); -+ fprintf( stderr, " addr: %6p\t", edges ); - fprintf( stderr, "adj: "); - for (i=0; i<3; ++i) -- fprintf( stderr, "%6x", edges->adjface[i] ); -+ fprintf( stderr, "%6p", edges->adjface[i] ); - fprintf( stderr, " endpts:"); - for (i=0; i<2; ++i) -- fprintf( stderr, "%8x", edges->endpts[i]); -+ fprintf( stderr, "%8p", edges->endpts[i]); - fprintf( stderr, " del:%3d\n", edges->deleted ); - edges = edges->next; - } while (edges != temp ); -@@ -452,13 +452,13 @@ - temp = faces; - fprintf (stderr, "Face List\n"); - if (faces) do { -- fprintf(stderr, " addr: %6x\t", faces ); -+ fprintf(stderr, " addr: %6p\t", faces ); - fprintf(stderr, " edges:"); - for( i=0; i<3; ++i ) -- fprintf(stderr, "%6x", faces->edg[i] ); -+ fprintf(stderr, "%6p", faces->edg[i] ); - fprintf(stderr, " vert:"); - for ( i=0; i<3; ++i) -- fprintf(stderr, "%6x", faces->vert[i] ); -+ fprintf(stderr, "%6p", faces->vert[i] ); - fprintf(stderr, " vis: %d\n", faces->visible ); - faces= faces->next; - } while ( faces != temp ); -@@ -552,8 +552,8 @@ - temp_v = temp_v->next; - } while ( temp_v != vertices ); - do { -- printf("3%5d%6d%6d\n", temp_f->vert[0]->vnum, -- temp_f->vert[1]->vnum, temp_f->vert[2]->vnum ); -+ printf("3%5d%6d%6d\n", temp_f->vert[0]->vnum[0], -+ temp_f->vert[1]->vnum[0], temp_f->vert[2]->vnum[0] ); - temp_f = temp_f->next; - } while ( temp_f != faces ); - } diff --git a/Golden_Repo/v/VMD/VMD-1.9.4a43-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/v/VMD/VMD-1.9.4a43-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index dfb320d98a37dab0b1408fa0557cf7633f4cdf08..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.4a43-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,66 +0,0 @@ -name = 'VMD' -version = '1.9.4a43' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://www.ks.uiuc.edu/Research/vmd' -description = """VMD is a molecular visualization program for displaying, animating, -and analyzing large biomolecular systems using 3-D graphics and built-in scripting.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} - -source_urls = [ - 'https://www.ks.uiuc.edu/Research/vmd/vmd-1.9.4/files/alpha/', - 'https://www.ks.uiuc.edu/Research/vmd/vmd-%(version)s/files/final', - 'http://webclu.bio.wzw.tum.de/stride/' -] -sources = [ - 'vmd-%(version)s.src.tar.gz', - 
{'filename': 'stride.tar.gz', 'extract_cmd': "tar -C vmd-%(version)s/lib/stride -xf %s"}, -] -patches = [ - ('VMD-1.9.3_plugins.patch'), - ('VMD-1.9.3_surf_Makefile.patch', 'vmd-%(version)s'), - ('VMD-1.9.3_surf_bad_printfs.patch', 'vmd-%(version)s'), - ('VMD-1.9.3_stride_Makefile.patch', 'vmd-%(version)s'), - ('VMD-1.9.3_stride_MAX_AT_IN_RES.patch', 'vmd-%(version)s'), - ('VMD-%(version)s_configure.patch', 'vmd-%(version)s'), - ('VMD-%(version)s_extra_colors.patch', 'vmd-%(version)s'), -] -checksums = [ - '84323b2c34db8ce5739372dd6e225ef1fa1dc5c4b82d3810d55923a653b1bdc0', # vmd-1.9.4a43.src.tar.gz - '51a8bc2988bb184bd08216124f61725225bb1a6f563bdf8cd35154cb5d621c1a', # stride.tar.gz - '85760d6ae838e2b09801e34b36b484532383f7aaf2e8634b3ef808002a92baa3', # VMD-1.9.3_plugins.patch - 'd5cfa88064b7cffbc75accd69707d4e45fda974e8127de9ab606fdad501bd68a', # VMD-1.9.3_surf_Makefile.patch - 'f3c2a8c155e38db8e644cee6a01f6beaea5988e72ac74cde26b71670b151cc34', # VMD-1.9.3_surf_bad_printfs.patch - 'eb194ac0d8c086b73f87b29f7d732687f902431b1cdfa139c090401fefdee51e', # VMD-1.9.3_stride_Makefile.patch - 'eff1ca00cec637a6c8a156b2fb038e078d1835ba0eb15a571ed820bca5a866d9', # VMD-1.9.3_stride_MAX_AT_IN_RES.patch - 'b2735cd79a4a2ecb9e1f0a1b974b3bc7dd6615ed375b69e155ec8481347e1fcf', # VMD-1.9.4_configure.patch - '253eba282b570eb00e4764f46f77fd5ca898d10360d5707dd50ad1f14615af80', # VMD-1.9.4_extra_colors.patch -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), - ('Tcl', '8.6.10'), - ('Tk', '8.6.10'), - ('FLTK', '1.3.5'), - ('X11', '20200222'), - ('fontconfig', '2.13.92'), - ('OpenGL', '2020'), - ('netCDF', '4.7.4', '-serial'), - ('FFmpeg', '4.4'), - ('ImageMagick', '7.0.10-25'), - ('ACTC', '1.1'), - ('OptiX', '6.5.0', '', SYSTEM), - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('CUDA', '11.3', '', SYSTEM), -] - -prebuildopts = ' NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - -moduleclass = 'vis' diff --git a/Golden_Repo/v/VMD/VMD-1.9.4a43-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/v/VMD/VMD-1.9.4a43-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 839bad4007e6dd32bd43c473e50d48310a5f9a92..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.4a43-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,66 +0,0 @@ -name = 'VMD' -version = '1.9.4a43' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://www.ks.uiuc.edu/Research/vmd' -description = """VMD is a molecular visualization program for displaying, animating, -and analyzing large biomolecular systems using 3-D graphics and built-in scripting.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -source_urls = [ - 'https://www.ks.uiuc.edu/Research/vmd/vmd-1.9.4/files/alpha/', - 'https://www.ks.uiuc.edu/Research/vmd/vmd-%(version)s/files/final', - 'http://webclu.bio.wzw.tum.de/stride/' -] -sources = [ - 'vmd-%(version)s.src.tar.gz', - {'filename': 'stride.tar.gz', 'extract_cmd': "tar -C vmd-%(version)s/lib/stride -xf %s"}, -] -patches = [ - ('VMD-1.9.3_plugins.patch'), - ('VMD-1.9.3_surf_Makefile.patch', 'vmd-%(version)s'), - ('VMD-1.9.3_surf_bad_printfs.patch', 'vmd-%(version)s'), - ('VMD-1.9.3_stride_Makefile.patch', 'vmd-%(version)s'), - ('VMD-1.9.3_stride_MAX_AT_IN_RES.patch', 'vmd-%(version)s'), - ('VMD-%(version)s_configure.patch', 'vmd-%(version)s'), - ('VMD-%(version)s_extra_colors.patch', 
'vmd-%(version)s'), -] -checksums = [ - '84323b2c34db8ce5739372dd6e225ef1fa1dc5c4b82d3810d55923a653b1bdc0', # vmd-1.9.4a43.src.tar.gz - '51a8bc2988bb184bd08216124f61725225bb1a6f563bdf8cd35154cb5d621c1a', # stride.tar.gz - '85760d6ae838e2b09801e34b36b484532383f7aaf2e8634b3ef808002a92baa3', # VMD-1.9.3_plugins.patch - 'd5cfa88064b7cffbc75accd69707d4e45fda974e8127de9ab606fdad501bd68a', # VMD-1.9.3_surf_Makefile.patch - 'f3c2a8c155e38db8e644cee6a01f6beaea5988e72ac74cde26b71670b151cc34', # VMD-1.9.3_surf_bad_printfs.patch - 'eb194ac0d8c086b73f87b29f7d732687f902431b1cdfa139c090401fefdee51e', # VMD-1.9.3_stride_Makefile.patch - 'eff1ca00cec637a6c8a156b2fb038e078d1835ba0eb15a571ed820bca5a866d9', # VMD-1.9.3_stride_MAX_AT_IN_RES.patch - 'b2735cd79a4a2ecb9e1f0a1b974b3bc7dd6615ed375b69e155ec8481347e1fcf', # VMD-1.9.4_configure.patch - '253eba282b570eb00e4764f46f77fd5ca898d10360d5707dd50ad1f14615af80', # VMD-1.9.4_extra_colors.patch -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2020', versionsuffix), - ('Tcl', '8.6.10'), - ('Tk', '8.6.10'), - ('FLTK', '1.3.5'), - ('X11', '20200222'), - ('fontconfig', '2.13.92'), - ('OpenGL', '2020'), - ('netCDF', '4.7.4', '-serial'), - ('FFmpeg', '4.3.1'), - ('ImageMagick', '7.0.10-25'), - ('ACTC', '1.1'), - ('OptiX', '6.5.0', '', SYSTEM), - ('zlib', '1.2.11'), - ('libpng', '1.6.37'), - ('CUDA', '11.0', '', SYSTEM), -] - -prebuildopts = ' NVCC_GENCODE="-gencode=arch=compute_70,code=sm_70 \ - -gencode=arch=compute_75,code=sm_75 \ - -gencode=arch=compute_80,code=sm_80"' - -moduleclass = 'vis' diff --git a/Golden_Repo/v/VMD/VMD-1.9.4a43_configure.patch b/Golden_Repo/v/VMD/VMD-1.9.4a43_configure.patch deleted file mode 100644 index d42835b7a8c8d983eeacea58c09a87c72f68cb62..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.4a43_configure.patch +++ /dev/null @@ -1,205 +0,0 @@ ---- configure.orig 2020-12-31 17:02:47.266221000 +0100 -+++ configure 2021-01-02 17:45:22.669512696 +0100 -@@ -493,17 +493,18 @@ - - $arch_cc = "cc"; - $arch_ccpp = "CC"; --$arch_nvcc = "/usr/local/cuda-10.2/bin/nvcc"; -+$arch_nvcc = "nvcc"; - $arch_nvccflags = "-lineinfo --ptxas-options=-v " . -- "-gencode arch=compute_30,code=compute_30 " . -- "-gencode arch=compute_30,code=sm_35 " . -- "-gencode arch=compute_30,code=sm_37 " . - "-gencode arch=compute_50,code=compute_50 " . - "-gencode arch=compute_50,code=sm_50 " . - "-gencode arch=compute_60,code=compute_60 " . - "-gencode arch=compute_60,code=sm_60 " . - "-gencode arch=compute_70,code=compute_70 " . - "-gencode arch=compute_70,code=sm_70 " . -+ "-gencode arch=compute_75,code=compute_75 " . -+ "-gencode arch=compute_75,code=sm_75 " . -+ "-gencode arch=compute_80,code=compute_80 " . -+ "-gencode arch=compute_80,code=sm_80 " . - "--ftz=true "; - # "-gencode arch=compute_75,code=sm_75 " . - $arch_gcc = "gcc"; -@@ -629,17 +630,17 @@ - # location of Mesa library and include files; basically does the same - # as OpenGL. This is based on the default instructions from the Mesa - # README; the include files should by default be in /usr/local/include/GL. 
--$mesa_dir = "$vmd_library_dir/Mesa"; -+$mesa_dir = $ENV{'EBROOTOPENGL'}; - $mesa_include = "-I$mesa_dir/include"; - $mesa_library = "-L$mesa_dir/lib_$config_arch"; - #$mesa_libs = "-lMesaGL -lMesaGLU"; --$mesa_libs = "-lMesaGL"; -+$mesa_libs = "-lGL -lGLU"; - $mesa_defines = "-DUSELINEAXES -DVMDMESA -DVMDOPENGL"; - @mesa_cc = (); - @mesa_cu = (); --@mesa_ccpp = @opengl_ccpp; --@mesa_h = @opengl_h; --@mesa_extra = @opengl_extra; -+@mesa_ccpp = (); -+@mesa_h = (); -+@mesa_extra = (); - - - # -@@ -714,10 +715,10 @@ - - ################ FLTK GUI - $fltk_defines = "-DVMDGUI -DVMDFLTK"; --$fltk_dir = "$vmd_library_dir/fltk"; -+$fltk_dir = $ENV{'EBROOTFLTK'}; - $fltk_include = "-I$fltk_dir/include"; - $fltk_library = "-L$fltk_dir/$config_arch"; --$fltk_libs = "-lfltk -lX11"; -+$fltk_libs = "-lfltk -L$ENV{'EBROOTFONTCONFIG'}/lib -lfontconfig -lX11 -lXrender -lXft -lXfixes -lXcursor -lXinerama"; - #@fltk_cc = ('forms_ui.c'); - @fltk_cu = (); - @fltk_ccpp = ( 'ColorFltkMenu.C', -@@ -759,7 +760,6 @@ - $stock_tcl_include_dir=$ENV{"TCL_INCLUDE_DIR"} || "$vmd_library_dir/tcl/include"; - $stock_tcl_library_dir=$ENV{"TCL_LIBRARY_DIR"} || "$vmd_library_dir/tcl/lib_$config_arch"; - -- - # location of Tk (for TK option) - #$stock_tk_include_dir=$ENV{"TK_INCLUDE_DIR"} || "/usr/local/include"; - #$stock_tk_library_dir=$ENV{"TK_LIBRARY_DIR"} || "/usr/local/lib"; -@@ -777,8 +777,8 @@ - if ($config_tk) { $tcl_include .= " -I$stock_tk_include_dir"; } - $tcl_library = "-L$stock_tcl_library_dir"; - if ($config_tk) { $tcl_library .= " -L$stock_tk_library_dir"; } --$tcl_libs = "-ltcl8.5"; --if ($config_tk) { $tcl_libs = "-ltk8.5 -lX11 " . $tcl_libs; } -+$tcl_libs = "-ltcl8.6"; -+if ($config_tk) { $tcl_libs = "-ltk8.6 -lX11 " . $tcl_libs; } - - @tcl_cc = (); - @tcl_cu = (); -@@ -942,7 +942,7 @@ - # This option enables the use of CUDA GPU acceleration functions. - ####################### - $cuda_defines = "-DVMDCUDA -DMSMPOT_CUDA"; --$cuda_dir = "/usr/local/cuda-10.2"; -+$cuda_dir = "$ENV{'CUDA_HOME'}"; - $cuda_include = ""; - $cuda_library = ""; - $cuda_libs = "-Wl,-rpath -Wl,\$\$ORIGIN/ -lcudart_static -lrt"; -@@ -1151,7 +1151,7 @@ - # $liboptix_dir = "/usr/local/encap/NVIDIA-OptiX-SDK-5.0.1-linux64"; - # $liboptix_dir = "/usr/local/encap/NVIDIA-OptiX-SDK-5.1.0-linux64"; - # $liboptix_dir = "/usr/local/encap/NVIDIA-OptiX-SDK-6.0.0-linux64"; --$liboptix_dir = "/usr/local/encap/NVIDIA-OptiX-SDK-6.5.0-linux64"; -+$liboptix_dir = "$ENV{'EBROOTOPTIX'}"; - # $liboptix_dir = "/usr/local/encap/NVIDIA-OptiX-SDK-7.0.0-linux64"; - - # NCSA Blue Waters -@@ -1306,7 +1306,7 @@ - die "LIBPNG option requires ZLIB!"; - } - $libpng_defines = "-DVMDLIBPNG"; --$libpng_dir = "/Projects/vmd/vmd/lib/libpng"; -+$libpng_dir = "$ENV{'EBROOTLIBPNG'}"; - $libpng_include = "-I$libpng_dir/include"; - $libpng_library = "-L$libpng_dir/lib_$config_arch"; - $libpng_libs = "-lpng16"; -@@ -1334,7 +1334,7 @@ - # OPTIONAL COMPONENT: Data compresssion library - # This may be commented out if not required. - $zlib_defines = "-DVMDZLIB"; --$zlib_dir = "/Projects/vmd/vmd/lib/zlib"; -+$zlib_dir = "$ENV{'EBROOTZLIB'}"; - $zlib_include = "-I$zlib_dir/include"; - $zlib_library = "-L$zlib_dir/lib_$config_arch"; - $zlib_libs = "-lz"; -@@ -1525,7 +1525,7 @@ - # primitives. 
- ####################### - $actc_defines = "-DVMDACTC"; --$actc_dir = "$vmd_library_dir/actc"; -+$actc_dir = "$ENV{'EBROOTACTC'}"; - $actc_include = "-I$actc_dir/include"; - $actc_library = "-L$actc_dir/lib_$config_arch"; - $actc_libs = "-lactc"; -@@ -1540,7 +1540,7 @@ - # OPTIONAL COMPONENT: NetCDF I/O Library (Used by cdfplugin) - ####################### - $netcdf_defines = ""; --$netcdf_dir = "$vmd_library_dir/netcdf"; -+$netcdf_dir = "$ENV{'EBROOTNETCDF'}"; - $netcdf_include = "-I$netcdf_dir/include"; - $netcdf_library = "-L$netcdf_dir/lib_$config_arch"; - $netcdf_libs = "-lnetcdf"; -@@ -1593,18 +1593,18 @@ - # If left blank, standard system directories will be searched. - #$stock_python_include_dir=$ENV{"PYTHON_INCLUDE_DIR"} || "/usr/local/include"; - #$stock_python_library_dir=$ENV{"PYTHON_LIBRARY_DIR"} || "/usr/local/lib"; --$stock_python_include_dir=$ENV{"PYTHON_INCLUDE_DIR"} || "$vmd_library_dir/python/lib_$config_arch/include/python2.5"; --$stock_python_library_dir=$ENV{"PYTHON_LIBRARY_DIR"} || "$vmd_library_dir/python/lib_$config_arch/lib/python2.5/config"; -+$stock_python_include_dir=$ENV{'PYTHON_INCLUDE_DIR'} || "$vmd_library_dir/python/lib_$config_arch/include/python2.5"; -+$stock_python_library_dir=$ENV{'PYTHON_LIBRARY_DIR'} || "$vmd_library_dir/python/lib_$config_arch/lib/python2.5/config"; - - #$stock_numpy_include_dir=$ENV{"NUMPY_INCLUDE_DIR"} || "/usr/local/include"; - #$stock_numpy_library_dir=$ENV{"NUMPY_LIBRARY_DIR"} || "/usr/local/lib"; --$stock_numpy_include_dir=$ENV{"NUMPY_INCLUDE_DIR"} || "$vmd_library_dir/numpy/lib_$config_arch/include"; --$stock_numpy_library_dir=$ENV{"NUMPY_LIBRARY_DIR"} || "$vmd_library_dir/python/lib_$config_arch/lib/python2.5/site-packages/numpy/core/include"; -+$stock_numpy_include_dir=$ENV{'NUMPY_INCLUDE_DIR'} || "$vmd_library_dir/numpy/lib_$config_arch/include"; -+$stock_numpy_library_dir="$ENV{'NUMPY_INCLUDE_DIR'}/../lib" || "$vmd_library_dir/python/lib_$config_arch/lib/python2.5/site-packages/numpy/core/include"; - - $python_defines = "-DVMDPYTHON"; - $python_include = "-I$stock_python_include_dir -I$stock_numpy_include_dir -I$stock_numpy_library_dir"; - $python_library = "-L$stock_python_library_dir"; --$python_libs = "-lpython2.5 -lpthread"; -+$python_libs = "-lpython3.8 -lpthread"; - @python_h = ('PythonTextInterp.h', - 'VMDTkinterMenu.h', - 'py_commands.h', -@@ -2513,7 +2513,7 @@ - - if ($config_cuda) { - $arch_nvccflags .= " --machine 64 -O3 $cuda_include"; -- $cuda_library = "-L/usr/local/cuda-10.2/lib64"; -+ $cuda_library = "-L$ENV{'EBROOTCUDA'}/lib64"; - } - - $arch_lex = "flex"; # has problems with vendor lex -@@ -2522,8 +2522,8 @@ - # override code probably date back to RHEL4.x or earlier, and - # they likely serve no useful purpose going forward. - if (!$config_opengl_dispatch) { -- $opengl_dep_libs = "-L/usr/X11R6/lib64 -lGL -lX11"; -- $mesa_libs = "-lMesaGL -L/usr/X11R6/lib64 -lXext -lX11"; -+ $opengl_dep_libs = "-L$ENV{'EBROOTOPENGL'}/lib -lGL -L$ENV{'EBROOTX11'}/lib -lX11"; -+ $mesa_libs = "-lGL -lGLU -L$ENV{'EBROOTX11'}/lib -lXext -lX11"; - } - - # this is to make tcl happy -@@ -2551,7 +2551,7 @@ - - $arch_nvcc = "/usr/local/cuda/bin/nvcc"; - $arch_nvccflags = "--ptxas-options=-v " . -- "-gencode arch=compute_30,code=compute_30 " . -+ "-gencode arch=compute_50,code=compute_50 " . - "-gencode arch=compute_70,code=compute_70 " . - "--ftz=true "; - $cuda_library = "-L/usr/local/cuda/lib64"; -@@ -2913,7 +2913,7 @@ - - # Only generate code for SM 7.0 on Summit - $arch_nvccflags = "--ptxas-options=-v " . 
-- "-gencode arch=compute_30,code=compute_30 " . -+ "-gencode arch=compute_50,code=compute_50 " . - "-gencode arch=compute_70,code=sm_70 " . - "--ftz=true "; - $arch_nvccflags .= " --machine 64 -O3 $cuda_include"; -@@ -3541,7 +3541,7 @@ - - .cu.ptx: - \$(ECHO) "Compiling " \$< " --> " \$*.ptx " ..."; \\ -- \$(NVCC) \$(DEFINES) --use_fast_math $liboptix_include -gencode arch=compute_30,code=compute_30 -ptx \$< $arch_coptout$vmd_arch_dir/\$\@ -+ \$(NVCC) \$(DEFINES) --use_fast_math $liboptix_include -gencode arch=compute_60,code=compute_60 -ptx \$< $arch_coptout$vmd_arch_dir/\$\@ - - .y.o: - diff --git a/Golden_Repo/v/VMD/VMD-1.9.4a43_extra_colors.patch b/Golden_Repo/v/VMD/VMD-1.9.4a43_extra_colors.patch deleted file mode 100644 index 26e9ba954dbbbb816975a947d97c4d1a69ca1f95..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VMD/VMD-1.9.4a43_extra_colors.patch +++ /dev/null @@ -1,46 +0,0 @@ -Add some additional colors, e.g. to allow for color blind compatible rendering. -Bob Dröge, 2020-06-23 ---- src/Scene.C.orig 2020-06-23 09:37:41.000000000 +0200 -+++ src/Scene.C 2020-06-23 09:40:59.000000000 +0200 -@@ -63,7 +63,10 @@ - ,"yellow2", "yellow3", "green2", "green3", - "cyan2", "cyan3", "blue2", "blue3", - "violet", "violet2", "magenta", "magenta2", -- "red2", "red3", "orange2", "orange3" -+ "red2", "red3", "orange2", "orange3", -+ "matisse", "flamenco", "forest_green", "punch", -+ "wisteria", "spicy_mix", "orchid", "gray2", -+ "lime_pie", "java" - #endif - - }; -@@ -89,7 +92,17 @@ - 0.27f, 0.00f, 0.98f, 0.45f, 0.00f, 0.90f, // violet - 0.90f, 0.00f, 0.90f, 1.00f, 0.00f, 0.66f, // magenta - 0.98f, 0.00f, 0.23f, 0.81f, 0.00f, 0.00f, // red -- 0.89f, 0.35f, 0.00f, 0.96f, 0.72f, 0.00f // orange -+ 0.89f, 0.35f, 0.00f, 0.96f, 0.72f, 0.00f, // orange -+ 0.1f, 0.5f, 0.7f, // MPL1, matisse -+ 1.0f, 0.5f, 0.1f, // MPL2, flamenco -+ 0.2f, 0.6f, 0.2f, // MPL3, forest green -+ 0.8f, 0.2f, 0.2f, // MPL4, punch -+ 0.6f, 0.4f, 0.7f, // MPL5, wisteria -+ 0.5f, 0.3f, 0.3f, // MPL6, spicy mix -+ 0.9f, 0.5f, 0.8f, // MPL7, orchid -+ 0.5f, 0.5f, 0.5f, // MPL8, gray -+ 0.7f, 0.7f, 0.1f, // MPL9, key lime pie -+ 0.1f, 0.7f, 0.8f // MPL10, java - #endif - - }; ---- src/Scene.h.orig 2020-06-23 09:37:45.000000000 +0200 -+++ src/Scene.h 2020-06-23 09:42:21.000000000 +0200 -@@ -37,7 +37,7 @@ - #define DISP_LIGHTS 4 - - // total number of colors defined here --#define REGCLRS 33 -+#define REGCLRS 43 - #define EXTRACLRS 1 - #define VISCLRS (REGCLRS - EXTRACLRS) - #define MAPCLRS 1024 diff --git a/Golden_Repo/v/VTK/VTK-8.2.0-gpsmpi-2021-Python-3.8.5.eb b/Golden_Repo/v/VTK/VTK-8.2.0-gpsmpi-2021-Python-3.8.5.eb deleted file mode 100644 index 533b3d43b3e14e092b54cac5ca874d0249d1032d..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VTK/VTK-8.2.0-gpsmpi-2021-Python-3.8.5.eb +++ /dev/null @@ -1,188 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'VTK' -version = '8.2.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.vtk.org' -description = """The Visualization Toolkit (VTK) is an open-source, freely available software system for - 3D computer graphics, image processing and visualization. VTK consists of a C++ class library and several - interpreted interface layers including Tcl/Tk, Java, and Python. 
VTK supports a wide variety of visualization - algorithms including: scalar, vector, tensor, texture, and volumetric methods; and advanced modeling techniques - such as: implicit modeling, polygon reduction, mesh smoothing, cutting, contouring, and Delaunay triangulation.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://www.vtk.org/files/release/%(version_major_minor)s'] -sources = [ - SOURCE_TAR_GZ, - '%(name)sData-%(version)s.tar.gz', -] -patches = [ - 'VTK-8.2.0_python_3.8_compatibility.patch', ('vtk-version.egg-info', '.'), - 'VTK-8.2.0_gcc10.patch', -] - -checksums = [ - '34c3dc775261be5e45a8049155f7228b6bd668106c72a3c435d95730d17d57bb', # VTK-8.2.0.tar.gz - 'd1ff312f7a63d90d8b7033a99109801f16a462ae411d648642838aae04bcc21e', # VTKData-8.2.0.tar.gz - 'a7586f60501de145d4c31e48aa0589547d9fe7a39f96ab31dae8e82aa5fb4403', # VTK-8.2.0_python_3.8_compatibility.patch - '787b82415ae7a4a1f815b4db0e25f7abc809a05fc85d7d219627f3a7e5d3867b', # vtk-version.egg-info -] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('mpi4py', '3.0.3', versionsuffix), - ('libxc', '4.3.4'), - ('netCDF', '4.7.4'), - ('X11', '20200222'), - ('OpenGL', '2020'), -] - -separate_build_dir = True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_INSTALL_LIBDIR=lib " - -configopts += "-DVTK_USE_SYSTEM_MPI4PY=ON " -configopts += "-DVTK_USE_SYSTEM_LZMA=ON " -configopts += "-DVTK_USE_SYSTEM_HDF5=ON " -configopts += "-DVTK_USE_SYSTEM_NETCDF=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " -configopts += "-DBUILD_TESTING=OFF " - -configopts += "-DVTK_SMP_IMPLEMENTATION_TYPE=OPENMP " -configopts += "-DVTK_Group_MPI:BOOL=ON " -configopts += "-DVTK_Group_Web:BOOL=ON " - -configopts += '-DOpenGL_GL_PREFERENCE=GLVND ' # "GLVND" or "LEGACY" -configopts += "-DOPENGL_EGL_INCLUDE_DIR=$EBROOTOPENGL/include " -configopts += "-DOPENGL_GLX_INCLUDE_DIR=$EBROOTOPENGL/include " -configopts += "-DOPENGL_INCLUDE_DIR=$EBROOTOPENGL/include " -configopts += "-DOPENGL_egl_LIBRARY=$EBROOTOPENGL/lib/libEGL.so.1 " -configopts += "-DOPENGL_glx_LIBRARY=$EBROOTOPENGL/lib/libGLX.so.0 " -configopts += "-DOPENGL_opengl_LIBRARY=$EBROOTOPENGL/lib/libOpenGL.so.0 " -configopts += "-DOPENGL_glu_LIBRARY=$EBROOTOPENGL/lib/libGLU.so " - -configopts += "-DVTK_WRAP_PYTHON=ON " -configopts += "-DVTK_PYTHON_VERSION=%(pyshortver)s " -configopts += "-DPYTHON_EXECUTABLE:PATH=$EBROOTPYTHON/bin/python%(pyshortver)s " -configopts += "-DPYTHON_INCLUDE_DIR:PATH=$EBROOTPYTHON/include/python%(pyshortver)s " -configopts += "-DPYTHON_LIBRARY:PATH=$EBROOTPYTHON/lib/libpython%%(pyshortver)s.%s " % SHLIB_EXT - -configopts += "-DHDF5_INCLUDE_DIRS=$EBROOTHDF5/include " - -configopts += "-DModule_vtkAcceleratorsVTKm:BOOL=ON " -# configopts += "-DModule_vtkDomainsMicroscopy:BOOL=OFF " -# configopts += "-DModule_vtkDomainsParallelChemistry:BOOL=OFF " -# configopts += "-DModule_vtkFiltersOpenTurns:BOOL=OFF " -# configopts += "-DModule_vtkFiltersParallelDIY2:BOOL=OFF " -# configopts += "-DModule_vtkFiltersParallelFlowPaths:BOOL=OFF " -configopts += "-DModule_vtkFiltersParallelGeometry:BOOL=ON " -configopts += "-DModule_vtkFiltersParallelMPI:BOOL=ON " -configopts += "-DModule_vtkFiltersParallelStatistics:BOOL=ON " -# configopts += "-DModule_vtkFiltersParallelVerdict:BOOL=OFF " -# configopts += 
"-DModule_vtkFiltersReebGraph:BOOL=OFF " -# configopts += "-DModule_vtkGUISupportQt:BOOL=OFF " -# configopts += "-DModule_vtkGUISupportQtOpenGL:BOOL=OFF " -# configopts += "-DModule_vtkGUISupportQtSQL:BOOL=OFF " -# configopts += "-DModule_vtkGUISupportQtWebkit:BOOL=OFF " -# configopts += "-DModule_vtkGeovisGDAL:BOOL=OFF " -# configopts += "-DModule_vtkIOADIOS:BOOL=OFF " -# configopts += "-DModule_vtkIOFFMPEG:BOOL=OFF " -# configopts += "-DModule_vtkIOGDAL:BOOL=OFF " -# configopts += "-DModule_vtkIOGeoJSON:BOOL=OFF " -# configopts += "-DModule_vtkIOLAS:BOOL=OFF " -# configopts += "-DModule_vtkIOMPIImage:BOOL=ON " -# configopts += "-DModule_vtkIOMPIParallel:BOOL=ON " -# configopts += "-DModule_vtkIOMotionFX:BOOL=OFF " -# configopts += "-DModule_vtkIOMySQL:BOOL=OFF " -# configopts += "-DModule_vtkIOODBC:BOOL=OFF " -# configopts += "-DModule_vtkIOPDAL:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelExodus:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelLSDyna:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelNetCDF:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelXdmf3:BOOL=OFF " -# configopts += "-DModule_vtkIOPostgreSQL:BOOL=OFF " -# configopts += "-DModule_vtkIOTRUCHAS:BOOL=OFF " -# configopts += "-DModule_vtkIOVPIC:BOOL=OFF " -# configopts += "-DModule_vtkIOXdmf2:BOOL=OFF " -# configopts += "-DModule_vtkIOXdmf3:BOOL=OFF " -# configopts += "-DModule_vtkImagingOpenGL2:BOOL=OFF " -# configopts += "-DModule_vtkInfovisBoost:BOOL=OFF " -# configopts += "-DModule_vtkInfovisBoostGraphAlg:BOOL=OFF -configopts += "-DModule_vtkParallelMPI:BOOL=ON " -configopts += "-DModule_vtkPython:BOOL=ON " -# configopts += "-DModule_vtkPythonInterpreter:BOOL=OFF " -# configopts += "-DModule_vtkRenderingExternal:BOOL=OFF " -# configopts += "-DModule_vtkRenderingFreeTypeFontConfig:BOOL=OFF " -# configopts += "-DModule_vtkRenderingLICOpenGL2:BOOL=OFF " -# configopts += "-DModule_vtkRenderingMatplotlib:BOOL=OFF " -# configopts += "-DModule_vtkRenderingOSPRay:BOOL=OFF " -# configopts += "-DModule_vtkRenderingOpenVR:BOOL=OFF " -# configopts += "-DModule_vtkRenderingOptiX:BOOL=OFF " -configopts += "-DModule_vtkRenderingParallel:BOOL=ON " -configopts += "-DModule_vtkRenderingParallelLIC:BOOL=ON " -# configopts += "-DModule_vtkRenderingQt:BOOL=OFF " -# configopts += "-DModule_vtkRenderingSceneGraph:BOOL=OFF " -# configopts += "-DModule_vtkRenderingTk:BOOL=OFF " -# configopts += "-DModule_vtkRenderingVolumeAMR:BOOL=OFF " -# configopts += "-DModule_vtkTclTk:BOOL=OFF " -# configopts += "-DModule_vtkTestingCore:BOOL=OFF " -# configopts += "-DModule_vtkTestingGenericBridge:BOOL=OFF " -# configopts += "-DModule_vtkTestingIOSQL:BOOL=OFF " -# configopts += "-DModule_vtkTestingRendering:BOOL=OFF " -# configopts += "-DModule_vtkUtilitiesBenchmarks:BOOL=OFF " -# configopts += "-DModule_vtkUtilitiesEncodeString:BOOL=OFF " -# configopts += "-DModule_vtkVPIC:BOOL=OFF " -configopts += "-DModule_vtkVTKm:BOOL=ON " -# configopts += "-DModule_vtkViewsGeovis:BOOL=OFF " -# configopts += "-DModule_vtkViewsQt:BOOL=OFF " -# configopts += "-DModule_vtkWebCore:BOOL=OFF " -# configopts += "-DModule_vtkWebGLExporter:BOOL=OFF " -# configopts += "-DModule_vtkWebPython:BOOL=OFF " -# configopts += "-DModule_vtkWrappingJava:BOOL=OFF " -# configopts += "-DModule_vtkWrappingPythonCore:BOOL=OFF " -# configopts += "-DModule_vtkWrappingTools:BOOL=OFF " -# configopts += "-DModule_vtkdiy2:BOOL=OFF " -# configopts += "-DModule_vtkkissfft:BOOL=OFF " -# configopts += "-DModule_vtkmpi4py:BOOL=OFF " -# configopts += "-DModule_vtkpegtl:BOOL=OFF " -# 
configopts += "-DModule_vtkxdmf2:BOOL=OFF " -# configopts += "-DModule_vtkxdmf3:BOOL=OFF " -# configopts += "-DModule_vtkzfp:BOOL=OFF " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -# Install a egg-info file so VTK is more python friendly, required for mayavi -local_egg_info_src = '%(builddir)s/VTK-%(version)s/vtk-version.egg-info' -local_egg_info_dest = '%(installdir)s/lib/python%(pyshortver)s/site-packages/vtk-%(version)s.egg-info' -postinstallcmds = [ - 'sed "s/#VTK_VERSION#/%%(version)s/" %s > %s' % (local_egg_info_src, local_egg_info_dest), -] - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/vtk%s-%%(version_major_minor)s' % x for x in - ['WrapPythonInit', 'WrapPython', 'WrapHierarchy']] + - ['bin/pvtkpython', 'bin/vtkpython'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/', 'include/vtk-%(version_major_minor)s'], -} - -sanity_check_commands = [ - ('python', "-c 'import %(namelower)s'"), - ('python', "-c 'import pkg_resources; pkg_resources.get_distribution(\"vtk\")'"), -] - -moduleclass = 'vis' diff --git a/Golden_Repo/v/VTK/VTK-8.2.0-gpsmpi-9.3.0-Python-3.8.5.eb b/Golden_Repo/v/VTK/VTK-8.2.0-gpsmpi-9.3.0-Python-3.8.5.eb deleted file mode 100644 index ab774b4ddadf39423d3e94b72405d2595ec59411..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VTK/VTK-8.2.0-gpsmpi-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,184 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'VTK' -version = '8.2.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://www.vtk.org' -description = """The Visualization Toolkit (VTK) is an open-source, freely available software system for - 3D computer graphics, image processing and visualization. VTK consists of a C++ class library and several - interpreted interface layers including Tcl/Tk, Java, and Python. 
VTK supports a wide variety of visualization - algorithms including: scalar, vector, tensor, texture, and volumetric methods; and advanced modeling techniques - such as: implicit modeling, polygon reduction, mesh smoothing, cutting, contouring, and Delaunay triangulation.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True} - -source_urls = ['https://www.vtk.org/files/release/%(version_major_minor)s'] -sources = [ - SOURCE_TAR_GZ, - '%(name)sData-%(version)s.tar.gz', -] -patches = ['VTK-8.2.0_python_3.8_compatibility.patch', ('vtk-version.egg-info', '.')] -checksums = [ - '34c3dc775261be5e45a8049155f7228b6bd668106c72a3c435d95730d17d57bb', # VTK-8.2.0.tar.gz - 'd1ff312f7a63d90d8b7033a99109801f16a462ae411d648642838aae04bcc21e', # VTKData-8.2.0.tar.gz - 'a7586f60501de145d4c31e48aa0589547d9fe7a39f96ab31dae8e82aa5fb4403', # VTK-8.2.0_python_3.8_compatibility.patch - '787b82415ae7a4a1f815b4db0e25f7abc809a05fc85d7d219627f3a7e5d3867b', # vtk-version.egg-info -] - -builddependencies = [ - ('CMake', '3.18.0'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('HDF5', '1.10.6'), - ('SciPy-Stack', '2020', versionsuffix, ('gcccoremkl', '9.3.0-2020.2.254')), - ('mpi4py', '3.0.3', versionsuffix), - ('libxc', '3.0.1'), - ('netCDF', '4.7.4'), - ('X11', '20200222'), - ('OpenGL', '2020'), -] - -separate_build_dir = True - -configopts = "-DCMAKE_BUILD_TYPE=Release " -configopts += "-DCMAKE_INSTALL_LIBDIR=lib " - -configopts += "-DVTK_USE_SYSTEM_MPI4PY=ON " -configopts += "-DVTK_USE_SYSTEM_LZMA=ON " -configopts += "-DVTK_USE_SYSTEM_HDF5=ON " -configopts += "-DVTK_USE_SYSTEM_NETCDF=ON " - -configopts += "-DBUILD_SHARED_LIBS=ON " -configopts += "-DBUILD_TESTING=OFF " - -configopts += "-DVTK_SMP_IMPLEMENTATION_TYPE=OPENMP " -configopts += "-DVTK_Group_MPI:BOOL=ON " -configopts += "-DVTK_Group_Web:BOOL=ON " - -configopts += '-DOpenGL_GL_PREFERENCE=GLVND ' # "GLVND" or "LEGACY" -configopts += "-DOPENGL_EGL_INCLUDE_DIR=$EBROOTOPENGL/include " -configopts += "-DOPENGL_GLX_INCLUDE_DIR=$EBROOTOPENGL/include " -configopts += "-DOPENGL_INCLUDE_DIR=$EBROOTOPENGL/include " -configopts += "-DOPENGL_egl_LIBRARY=$EBROOTOPENGL/lib/libEGL.so.1 " -configopts += "-DOPENGL_glx_LIBRARY=$EBROOTOPENGL/lib/libGLX.so.0 " -configopts += "-DOPENGL_opengl_LIBRARY=$EBROOTOPENGL/lib/libOpenGL.so.0 " -configopts += "-DOPENGL_glu_LIBRARY=$EBROOTOPENGL/lib/libGLU.so " - -configopts += "-DVTK_WRAP_PYTHON=ON " -configopts += "-DVTK_PYTHON_VERSION=%(pyshortver)s " -configopts += "-DPYTHON_EXECUTABLE:PATH=$EBROOTPYTHON/bin/python%(pyshortver)s " -configopts += "-DPYTHON_INCLUDE_DIR:PATH=$EBROOTPYTHON/include/python%(pyshortver)s " -configopts += "-DPYTHON_LIBRARY:PATH=$EBROOTPYTHON/lib/libpython%%(pyshortver)s.%s " % SHLIB_EXT - -configopts += "-DHDF5_INCLUDE_DIRS=$EBROOTHDF5/include " - -configopts += "-DModule_vtkAcceleratorsVTKm:BOOL=ON " -# configopts += "-DModule_vtkDomainsMicroscopy:BOOL=OFF " -# configopts += "-DModule_vtkDomainsParallelChemistry:BOOL=OFF " -# configopts += "-DModule_vtkFiltersOpenTurns:BOOL=OFF " -# configopts += "-DModule_vtkFiltersParallelDIY2:BOOL=OFF " -# configopts += "-DModule_vtkFiltersParallelFlowPaths:BOOL=OFF " -configopts += "-DModule_vtkFiltersParallelGeometry:BOOL=ON " -configopts += "-DModule_vtkFiltersParallelMPI:BOOL=ON " -configopts += "-DModule_vtkFiltersParallelStatistics:BOOL=ON " -# configopts += "-DModule_vtkFiltersParallelVerdict:BOOL=OFF " -# configopts += "-DModule_vtkFiltersReebGraph:BOOL=OFF " -# 
configopts += "-DModule_vtkGUISupportQt:BOOL=OFF " -# configopts += "-DModule_vtkGUISupportQtOpenGL:BOOL=OFF " -# configopts += "-DModule_vtkGUISupportQtSQL:BOOL=OFF " -# configopts += "-DModule_vtkGUISupportQtWebkit:BOOL=OFF " -# configopts += "-DModule_vtkGeovisGDAL:BOOL=OFF " -# configopts += "-DModule_vtkIOADIOS:BOOL=OFF " -# configopts += "-DModule_vtkIOFFMPEG:BOOL=OFF " -# configopts += "-DModule_vtkIOGDAL:BOOL=OFF " -# configopts += "-DModule_vtkIOGeoJSON:BOOL=OFF " -# configopts += "-DModule_vtkIOLAS:BOOL=OFF " -# configopts += "-DModule_vtkIOMPIImage:BOOL=ON " -# configopts += "-DModule_vtkIOMPIParallel:BOOL=ON " -# configopts += "-DModule_vtkIOMotionFX:BOOL=OFF " -# configopts += "-DModule_vtkIOMySQL:BOOL=OFF " -# configopts += "-DModule_vtkIOODBC:BOOL=OFF " -# configopts += "-DModule_vtkIOPDAL:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelExodus:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelLSDyna:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelNetCDF:BOOL=OFF " -# configopts += "-DModule_vtkIOParallelXdmf3:BOOL=OFF " -# configopts += "-DModule_vtkIOPostgreSQL:BOOL=OFF " -# configopts += "-DModule_vtkIOTRUCHAS:BOOL=OFF " -# configopts += "-DModule_vtkIOVPIC:BOOL=OFF " -# configopts += "-DModule_vtkIOXdmf2:BOOL=OFF " -# configopts += "-DModule_vtkIOXdmf3:BOOL=OFF " -# configopts += "-DModule_vtkImagingOpenGL2:BOOL=OFF " -# configopts += "-DModule_vtkInfovisBoost:BOOL=OFF " -# configopts += "-DModule_vtkInfovisBoostGraphAlg:BOOL=OFF -configopts += "-DModule_vtkParallelMPI:BOOL=ON " -configopts += "-DModule_vtkPython:BOOL=ON " -# configopts += "-DModule_vtkPythonInterpreter:BOOL=OFF " -# configopts += "-DModule_vtkRenderingExternal:BOOL=OFF " -# configopts += "-DModule_vtkRenderingFreeTypeFontConfig:BOOL=OFF " -# configopts += "-DModule_vtkRenderingLICOpenGL2:BOOL=OFF " -# configopts += "-DModule_vtkRenderingMatplotlib:BOOL=OFF " -# configopts += "-DModule_vtkRenderingOSPRay:BOOL=OFF " -# configopts += "-DModule_vtkRenderingOpenVR:BOOL=OFF " -# configopts += "-DModule_vtkRenderingOptiX:BOOL=OFF " -configopts += "-DModule_vtkRenderingParallel:BOOL=ON " -configopts += "-DModule_vtkRenderingParallelLIC:BOOL=ON " -# configopts += "-DModule_vtkRenderingQt:BOOL=OFF " -# configopts += "-DModule_vtkRenderingSceneGraph:BOOL=OFF " -# configopts += "-DModule_vtkRenderingTk:BOOL=OFF " -# configopts += "-DModule_vtkRenderingVolumeAMR:BOOL=OFF " -# configopts += "-DModule_vtkTclTk:BOOL=OFF " -# configopts += "-DModule_vtkTestingCore:BOOL=OFF " -# configopts += "-DModule_vtkTestingGenericBridge:BOOL=OFF " -# configopts += "-DModule_vtkTestingIOSQL:BOOL=OFF " -# configopts += "-DModule_vtkTestingRendering:BOOL=OFF " -# configopts += "-DModule_vtkUtilitiesBenchmarks:BOOL=OFF " -# configopts += "-DModule_vtkUtilitiesEncodeString:BOOL=OFF " -# configopts += "-DModule_vtkVPIC:BOOL=OFF " -configopts += "-DModule_vtkVTKm:BOOL=ON " -# configopts += "-DModule_vtkViewsGeovis:BOOL=OFF " -# configopts += "-DModule_vtkViewsQt:BOOL=OFF " -# configopts += "-DModule_vtkWebCore:BOOL=OFF " -# configopts += "-DModule_vtkWebGLExporter:BOOL=OFF " -# configopts += "-DModule_vtkWebPython:BOOL=OFF " -# configopts += "-DModule_vtkWrappingJava:BOOL=OFF " -# configopts += "-DModule_vtkWrappingPythonCore:BOOL=OFF " -# configopts += "-DModule_vtkWrappingTools:BOOL=OFF " -# configopts += "-DModule_vtkdiy2:BOOL=OFF " -# configopts += "-DModule_vtkkissfft:BOOL=OFF " -# configopts += "-DModule_vtkmpi4py:BOOL=OFF " -# configopts += "-DModule_vtkpegtl:BOOL=OFF " -# configopts += "-DModule_vtkxdmf2:BOOL=OFF " -# 
configopts += "-DModule_vtkxdmf3:BOOL=OFF " -# configopts += "-DModule_vtkzfp:BOOL=OFF " - -preinstallopts = "export PYTHONPATH=%(installdir)s/lib/python%(pyshortver)s/site-packages:$PYTHONPATH && " - -# Install a egg-info file so VTK is more python friendly, required for mayavi -local_egg_info_src = '%(builddir)s/VTK-%(version)s/vtk-version.egg-info' -local_egg_info_dest = '%(installdir)s/lib/python%(pyshortver)s/site-packages/vtk-%(version)s.egg-info' -postinstallcmds = [ - 'sed "s/#VTK_VERSION#/%%(version)s/" %s > %s' % (local_egg_info_src, local_egg_info_dest), -] - -modextrapaths = {'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages']} - -sanity_check_paths = { - 'files': ['bin/vtk%s-%%(version_major_minor)s' % x for x in - ['WrapPythonInit', 'WrapPython', 'WrapHierarchy']] + - ['bin/pvtkpython', 'bin/vtkpython'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/', 'include/vtk-%(version_major_minor)s'], -} - -sanity_check_commands = [ - ('python', "-c 'import %(namelower)s'"), - ('python', "-c 'import pkg_resources; pkg_resources.get_distribution(\"vtk\")'"), -] - -moduleclass = 'vis' diff --git a/Golden_Repo/v/VTK/VTK-8.2.0_gcc10.patch b/Golden_Repo/v/VTK/VTK-8.2.0_gcc10.patch deleted file mode 100644 index 95118899e1d50131d5e534a018b63f85c60f6aa9..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VTK/VTK-8.2.0_gcc10.patch +++ /dev/null @@ -1,34 +0,0 @@ ---- a/CMake/VTKGenerateExportHeader.cmake 2020-05-24 14:33:12.154603698 -0600 -+++ b/CMake/VTKGenerateExportHeader.cmake 2020-05-24 14:33:34.864603776 -0600 -@@ -174,8 +174,12 @@ - execute_process(COMMAND ${CMAKE_C_COMPILER} --version - OUTPUT_VARIABLE _gcc_version_info - ERROR_VARIABLE _gcc_version_info) -- string(REGEX MATCH "[3-9]\\.[0-9]\\.[0-9]*" -+ string(REGEX MATCH "[1-9][0-9]\\.[0-9]\\.[0-9]*" - _gcc_version "${_gcc_version_info}") -+ if(NOT _gcc_version) -+ string(REGEX MATCH "[3-9]\\.[0-9]\\.[0-9]*" -+ _gcc_version "${_gcc_version_info}") -+ endif() - # gcc on mac just reports: "gcc (GCC) 3.3 20030304 ..." without the - # patch level, handle this here: - if(NOT _gcc_version) ---- a/ThirdParty/exodusII/vtkexodusII/src/ex_create_par.c -+++ b/ThirdParty/exodusII/vtkexodusII/src/ex_create_par.c -@@ -216,5 +216,5 @@ int ex_create_par_int(const char *path, int cmode, int *comp_ws, int *io_ws, MPI - * Prevent warning in some versions of ranlib(1) because the object - * file has no symbols. - */ --const char exodus_unused_symbol_dummy_1; -+const char exodus_unused_symbol_dummy_ex_create_par; - #endif ---- a/ThirdParty/exodusII/vtkexodusII/src/ex_open_par.c -+++ b/ThirdParty/exodusII/vtkexodusII/src/ex_open_par.c -@@ -459,5 +459,5 @@ int ex_open_par_int(const char *path, int mode, int *comp_ws, int *io_ws, float - * Prevent warning in some versions of ranlib(1) because the object - * file has no symbols. 
- */ --const char exodus_unused_symbol_dummy_1; -+const char exodus_unused_symbol_dummy_ex_open_par; - #endif diff --git a/Golden_Repo/v/VTK/VTK-8.2.0_python_3.8_compatibility.patch b/Golden_Repo/v/VTK/VTK-8.2.0_python_3.8_compatibility.patch deleted file mode 100644 index 60134b1fee16d4856ded561d40795e9a7e521540..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VTK/VTK-8.2.0_python_3.8_compatibility.patch +++ /dev/null @@ -1,176 +0,0 @@ -From 257b9d7b18d5f3db3fe099dc18f230e23f7dfbab Mon Sep 17 00:00:00 2001 -From: David Gobbi <david.gobbi@gmail.com> -Date: Tue, 20 Aug 2019 17:02:24 -0600 -Subject: [PATCH] Compatibility for Python 3.8 - -The PyTypeObject struct was modified in Python 3.8, this change is -required to avoid compile errors. ---- - .../PythonInterpreter/vtkPythonStdStreamCaptureHelper.h | 6 ++++++ - Wrapping/PythonCore/PyVTKMethodDescriptor.cxx | 2 +- - Wrapping/PythonCore/PyVTKNamespace.cxx | 2 +- - Wrapping/PythonCore/PyVTKReference.cxx | 8 ++++---- - Wrapping/PythonCore/PyVTKTemplate.cxx | 2 +- - Wrapping/PythonCore/vtkPythonCompatibility.h | 8 +++++++- - Wrapping/Tools/vtkWrapPythonClass.c | 2 +- - Wrapping/Tools/vtkWrapPythonEnum.c | 2 +- - Wrapping/Tools/vtkWrapPythonType.c | 2 +- - 9 files changed, 23 insertions(+), 11 deletions(-) - -diff --git a/Utilities/PythonInterpreter/vtkPythonStdStreamCaptureHelper.h b/Utilities/PythonInterpreter/vtkPythonStdStreamCaptureHelper.h -index b1c12c83de..14ccfbe928 100644 ---- a/Utilities/PythonInterpreter/vtkPythonStdStreamCaptureHelper.h -+++ b/Utilities/PythonInterpreter/vtkPythonStdStreamCaptureHelper.h -@@ -140,6 +140,12 @@ static PyTypeObject vtkPythonStdStreamCaptureHelperType = { - #if PY_VERSION_HEX >= 0x03040000 - 0, // tp_finalize - #endif -+#if PY_VERSION_HEX >= 0x03080000 -+ 0, // tp_vectorcall -+#if PY_VERSION_HEX < 0x03090000 -+ 0, // tp_print -+#endif -+#endif - }; - - static PyObject* vtkWrite(PyObject* self, PyObject* args) -diff --git a/Wrapping/PythonCore/PyVTKMethodDescriptor.cxx b/Wrapping/PythonCore/PyVTKMethodDescriptor.cxx -index 2b0d443537..3840038498 100644 ---- a/Wrapping/PythonCore/PyVTKMethodDescriptor.cxx -+++ b/Wrapping/PythonCore/PyVTKMethodDescriptor.cxx -@@ -186,7 +186,7 @@ PyTypeObject PyVTKMethodDescriptor_Type = { - sizeof(PyMethodDescrObject), // tp_basicsize - 0, // tp_itemsize - PyVTKMethodDescriptor_Delete, // tp_dealloc -- nullptr, // tp_print -+ 0, // tp_vectorcall_offset - nullptr, // tp_getattr - nullptr, // tp_setattr - nullptr, // tp_compare -diff --git a/Wrapping/PythonCore/PyVTKNamespace.cxx b/Wrapping/PythonCore/PyVTKNamespace.cxx -index 71ee2a3516..5cf5bfbe6b 100644 ---- a/Wrapping/PythonCore/PyVTKNamespace.cxx -+++ b/Wrapping/PythonCore/PyVTKNamespace.cxx -@@ -49,7 +49,7 @@ PyTypeObject PyVTKNamespace_Type = { - 0, // tp_basicsize - 0, // tp_itemsize - PyVTKNamespace_Delete, // tp_dealloc -- nullptr, // tp_print -+ 0, // tp_vectorcall_offset - nullptr, // tp_getattr - nullptr, // tp_setattr - nullptr, // tp_compare -diff --git a/Wrapping/PythonCore/PyVTKReference.cxx b/Wrapping/PythonCore/PyVTKReference.cxx -index 943ac71080..b7104091c0 100644 ---- a/Wrapping/PythonCore/PyVTKReference.cxx -+++ b/Wrapping/PythonCore/PyVTKReference.cxx -@@ -1010,7 +1010,7 @@ PyTypeObject PyVTKReference_Type = { - sizeof(PyVTKReference), // tp_basicsize - 0, // tp_itemsize - PyVTKReference_Delete, // tp_dealloc -- nullptr, // tp_print -+ 0, // tp_vectorcall_offset - nullptr, // tp_getattr - nullptr, // tp_setattr - nullptr, // tp_compare -@@ -1067,7 +1067,7 @@ PyTypeObject 
PyVTKNumberReference_Type = { - sizeof(PyVTKReference), // tp_basicsize - 0, // tp_itemsize - PyVTKReference_Delete, // tp_dealloc -- nullptr, // tp_print -+ 0, // tp_vectorcall_offset - nullptr, // tp_getattr - nullptr, // tp_setattr - nullptr, // tp_compare -@@ -1124,7 +1124,7 @@ PyTypeObject PyVTKStringReference_Type = { - sizeof(PyVTKReference), // tp_basicsize - 0, // tp_itemsize - PyVTKReference_Delete, // tp_dealloc -- nullptr, // tp_print -+ 0, // tp_vectorcall_offset - nullptr, // tp_getattr - nullptr, // tp_setattr - nullptr, // tp_compare -@@ -1181,7 +1181,7 @@ PyTypeObject PyVTKTupleReference_Type = { - sizeof(PyVTKReference), // tp_basicsize - 0, // tp_itemsize - PyVTKReference_Delete, // tp_dealloc -- nullptr, // tp_print -+ 0, // tp_vectorcall_offset - nullptr, // tp_getattr - nullptr, // tp_setattr - nullptr, // tp_compare -diff --git a/Wrapping/PythonCore/PyVTKTemplate.cxx b/Wrapping/PythonCore/PyVTKTemplate.cxx -index be200985b3..340fe7953b 100644 ---- a/Wrapping/PythonCore/PyVTKTemplate.cxx -+++ b/Wrapping/PythonCore/PyVTKTemplate.cxx -@@ -268,7 +268,7 @@ PyTypeObject PyVTKTemplate_Type = { - 0, // tp_basicsize - 0, // tp_itemsize - nullptr, // tp_dealloc -- nullptr, // tp_print -+ 0, // tp_vectorcall_offset - nullptr, // tp_getattr - nullptr, // tp_setattr - nullptr, // tp_compare -diff --git a/Wrapping/PythonCore/vtkPythonCompatibility.h b/Wrapping/PythonCore/vtkPythonCompatibility.h -index 4a767844a6..be208faeef 100644 ---- a/Wrapping/PythonCore/vtkPythonCompatibility.h -+++ b/Wrapping/PythonCore/vtkPythonCompatibility.h -@@ -64,7 +64,13 @@ - #endif - - // PyTypeObject compatibility --#if PY_VERSION_HEX >= 0x03040000 -+#if PY_VERSION_HEX >= 0x03090000 -+#define VTK_WRAP_PYTHON_SUPPRESS_UNINITIALIZED \ -+ 0, 0, 0, 0, -+#elif PY_VERSION_HEX >= 0x03080000 -+#define VTK_WRAP_PYTHON_SUPPRESS_UNINITIALIZED \ -+ 0, 0, 0, 0, 0, -+#elif PY_VERSION_HEX >= 0x03040000 - #define VTK_WRAP_PYTHON_SUPPRESS_UNINITIALIZED \ - 0, 0, 0, - #else -diff --git a/Wrapping/Tools/vtkWrapPythonClass.c b/Wrapping/Tools/vtkWrapPythonClass.c -index b1e45f8e80..4d558ea081 100644 ---- a/Wrapping/Tools/vtkWrapPythonClass.c -+++ b/Wrapping/Tools/vtkWrapPythonClass.c -@@ -521,7 +521,7 @@ void vtkWrapPython_GenerateObjectType( - " sizeof(PyVTKObject), // tp_basicsize\n" - " 0, // tp_itemsize\n" - " PyVTKObject_Delete, // tp_dealloc\n" -- " nullptr, // tp_print\n" -+ " 0, // tp_vectorcall_offset\n" - " nullptr, // tp_getattr\n" - " nullptr, // tp_setattr\n" - " nullptr, // tp_compare\n" -diff --git a/Wrapping/Tools/vtkWrapPythonEnum.c b/Wrapping/Tools/vtkWrapPythonEnum.c -index b933702242..1249362854 100644 ---- a/Wrapping/Tools/vtkWrapPythonEnum.c -+++ b/Wrapping/Tools/vtkWrapPythonEnum.c -@@ -145,7 +145,7 @@ void vtkWrapPython_GenerateEnumType( - " sizeof(PyIntObject), // tp_basicsize\n" - " 0, // tp_itemsize\n" - " nullptr, // tp_dealloc\n" -- " nullptr, // tp_print\n" -+ " 0, // tp_vectorcall_offset\n" - " nullptr, // tp_getattr\n" - " nullptr, // tp_setattr\n" - " nullptr, // tp_compare\n" -diff --git a/Wrapping/Tools/vtkWrapPythonType.c b/Wrapping/Tools/vtkWrapPythonType.c -index 744cb1b9d3..0a1375e541 100644 ---- a/Wrapping/Tools/vtkWrapPythonType.c -+++ b/Wrapping/Tools/vtkWrapPythonType.c -@@ -709,7 +709,7 @@ void vtkWrapPython_GenerateSpecialType( - " sizeof(PyVTKSpecialObject), // tp_basicsize\n" - " 0, // tp_itemsize\n" - " Py%s_Delete, // tp_dealloc\n" -- " nullptr, // tp_print\n" -+ " 0, // tp_vectorcall_offset\n" - " nullptr, // tp_getattr\n" - " nullptr, // tp_setattr\n" - " nullptr, // 
tp_compare\n" --- -2.24.1 - diff --git a/Golden_Repo/v/VTune/VTune-2019_update8.eb b/Golden_Repo/v/VTune/VTune-2019_update8.eb deleted file mode 100644 index 30aa9a814f38568759ea355cb9773316419f6bfb..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VTune/VTune-2019_update8.eb +++ /dev/null @@ -1,17 +0,0 @@ -name = 'VTune' -version = '2019_update8' - -homepage = 'http://software.intel.com/en-us/intel-vtune-amplifier-xe' -description = 'Intel VTune Amplifier is the premier performance profiler for C, C++, C#, Fortran, Assembly and Java.' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['vtune_amplifier_%(version)s.tar.gz'] - -dontcreateinstalldir = True - -requires_runtime_license = False - -moduleclass = 'tools' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.15.0-no-MPI-1.patch b/Golden_Repo/v/Valgrind/Valgrind-3.15.0-no-MPI-1.patch deleted file mode 100644 index 54d26dd4043f5f3b83911077958d80f598effe16..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.15.0-no-MPI-1.patch +++ /dev/null @@ -1,66 +0,0 @@ -From a3262243ca01f7cfe3ae86b168d5b3a7e5d1cbc0 Mon Sep 17 00:00:00 2001 -From: Balint Reczey <balint.reczey@canonical.com> -Date: Thu, 28 Nov 2019 19:31:12 +0100 -Subject: [PATCH] Drop MPI 1 support - -Signed-off-by: Balint Reczey <balint.reczey@canonical.com> ---- - mpi/libmpiwrap.c | 16 ++-------------- - 1 file changed, 2 insertions(+), 14 deletions(-) - -diff --git a/mpi/libmpiwrap.c b/mpi/libmpiwrap.c -index 488bb13fd..a740ea562 100644 ---- a/mpi/libmpiwrap.c -+++ b/mpi/libmpiwrap.c -@@ -278,8 +278,6 @@ static void showTy ( FILE* f, MPI_Datatype ty ) - else if (ty == MPI_LONG_INT) fprintf(f,"LONG_INT"); - else if (ty == MPI_SHORT_INT) fprintf(f,"SHORT_INT"); - else if (ty == MPI_2INT) fprintf(f,"2INT"); -- else if (ty == MPI_UB) fprintf(f,"UB"); -- else if (ty == MPI_LB) fprintf(f,"LB"); - # if defined(MPI_WCHAR) - else if (ty == MPI_WCHAR) fprintf(f,"WCHAR"); - # endif -@@ -350,20 +348,11 @@ static void showCombiner ( FILE* f, int combiner ) - # endif - case MPI_COMBINER_CONTIGUOUS: fprintf(f, "CONTIGUOUS"); break; - case MPI_COMBINER_VECTOR: fprintf(f, "VECTOR"); break; --#if defined(MPI_COMBINER_HVECTOR_INTEGER) -- case MPI_COMBINER_HVECTOR_INTEGER: fprintf(f, "HVECTOR_INTEGER"); break; --# endif - case MPI_COMBINER_HVECTOR: fprintf(f, "HVECTOR"); break; - case MPI_COMBINER_INDEXED: fprintf(f, "INDEXED"); break; --#if defined(MPI_COMBINER_HINDEXED_INTEGER) -- case MPI_COMBINER_HINDEXED_INTEGER: fprintf(f, "HINDEXED_INTEGER"); break; --# endif - case MPI_COMBINER_HINDEXED: fprintf(f, "HINDEXED"); break; - #if defined(MPI_COMBINER_INDEXED_BLOCK) - case MPI_COMBINER_INDEXED_BLOCK: fprintf(f, "INDEXED_BLOCK"); break; --# endif --#if defined(MPI_COMBINER_STRUCT_INTEGER) -- case MPI_COMBINER_STRUCT_INTEGER: fprintf(f, "STRUCT_INTEGER"); break; - # endif - case MPI_COMBINER_STRUCT: fprintf(f, "STRUCT"); break; - #if defined(MPI_COMBINER_SUBARRAY) -@@ -458,8 +447,9 @@ Bool isMSI ( MPI_Status* status ) - static long extentOfTy ( MPI_Datatype ty ) - { - int r; -+ MPI_Aint lb; - MPI_Aint n; -- r = PMPI_Type_extent(ty, &n); -+ r = MPI_Type_get_extent(ty, &lb, &n); - assert(r == MPI_SUCCESS); - return (long)n; - } -@@ -733,8 +723,6 @@ void walk_type ( void(*f)(void*,long), char* base, MPI_Datatype ty ) - f(base + offsetof(Ty,loc), sizeof(int)); - return; - } -- if (ty == MPI_LB || ty == MPI_UB) -- return; /* have zero size, so nothing needs to be done */ - goto unhandled; - /*NOTREACHED*/ - } --- -2.17.1 diff --git 
a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-gompi-2020.eb b/Golden_Repo/v/Valgrind/Valgrind-3.16.1-gompi-2020.eb deleted file mode 100644 index 038407d8c4498482032cf684c16f53a8621d4e7c..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-gompi-2020.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.16.1' - -homepage = 'http://valgrind.org/downloads/' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -source_urls = ['ftp://sourceware.org/pub/valgrind/'] -sources = [SOURCELOWER_TAR_BZ2] -patches = ['Valgrind-3.15.0-no-MPI-1.patch'] -checksums = [ - # valgrind-3.16.1.tar.bz2 - 'c91f3a2f7b02db0f3bc99479861656154d241d2fdb265614ba918cc6720a33ca', - # Valgrind-3.15.0-no-MPI-1.patch - 'e99464ad9f2d2e74c5c50d2fa1bcf8637295b11cd841102035f8601b01afd85f', -] - -toolchain = {'name': 'gompi', 'version': '2020'} - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-gpsmpi-2020.eb b/Golden_Repo/v/Valgrind/Valgrind-3.16.1-gpsmpi-2020.eb deleted file mode 100644 index 0d00517f41f1f228751ded57b32b2c4f96d3454e..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-gpsmpi-2020.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.16.1' - -homepage = 'http://valgrind.org/downloads/' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -source_urls = ['ftp://sourceware.org/pub/valgrind/'] -sources = [SOURCELOWER_TAR_BZ2] - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-iimpi-2020.eb b/Golden_Repo/v/Valgrind/Valgrind-3.16.1-iimpi-2020.eb deleted file mode 100644 index 4f9dadbbc4a5c27c544539c44522b02aad4c83ad..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-iimpi-2020.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.16.1' - -homepage = 'http://valgrind.org/downloads/' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -source_urls = ['ftp://sourceware.org/pub/valgrind/'] -sources = [SOURCELOWER_TAR_BZ2] - -toolchain = {'name': 'iimpi', 'version': '2020'} - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - 
['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-iompi-2020.eb b/Golden_Repo/v/Valgrind/Valgrind-3.16.1-iompi-2020.eb deleted file mode 100644 index 48fb40accf2013ae7a41a78f4df3ac38e0178d21..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-iompi-2020.eb +++ /dev/null @@ -1,37 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.16.1' - -homepage = 'http://valgrind.org/downloads/' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -source_urls = ['ftp://sourceware.org/pub/valgrind/'] -sources = [SOURCELOWER_TAR_BZ2] -patches = ['Valgrind-3.15.0-no-MPI-1.patch'] -checksums = [ - # valgrind-3.16.1.tar.bz2 - 'c91f3a2f7b02db0f3bc99479861656154d241d2fdb265614ba918cc6720a33ca', - # Valgrind-3.15.0-no-MPI-1.patch - 'e99464ad9f2d2e74c5c50d2fa1bcf8637295b11cd841102035f8601b01afd85f', -] - -toolchain = {'name': 'iompi', 'version': '2020'} - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-ipsmpi-2020.eb b/Golden_Repo/v/Valgrind/Valgrind-3.16.1-ipsmpi-2020.eb deleted file mode 100644 index 33891186d2b011c14503f1ee83035ed1315cdd69..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.16.1-ipsmpi-2020.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.16.1' - -homepage = 'http://valgrind.org/downloads/' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -source_urls = ['ftp://sourceware.org/pub/valgrind/'] -sources = [SOURCELOWER_TAR_BZ2] - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-gompi-2021.eb b/Golden_Repo/v/Valgrind/Valgrind-3.17.0-gompi-2021.eb deleted file mode 100644 index e4cc1fdc5629033820a138fd44ed7a0e10c1cd54..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-gompi-2021.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.17.0' - -homepage = 'https://valgrind.org' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gompi', 'version': '2021'} - -source_urls = [ - 'https://sourceware.org/pub/valgrind/', - 'https://www.mirrorservice.org/sites/sourceware.org/pub/valgrind/', -] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['ad3aec668e813e40f238995f60796d9590eee64a16dff88421430630e69285a2'] - -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 
'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-gpsmpi-2021.eb b/Golden_Repo/v/Valgrind/Valgrind-3.17.0-gpsmpi-2021.eb deleted file mode 100644 index 6daa8181a4e445e0bdfebe0484a708cc60df9fe2..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-gpsmpi-2021.eb +++ /dev/null @@ -1,33 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.17.0' - -homepage = 'https://valgrind.org' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2021'} - -source_urls = [ - 'https://sourceware.org/pub/valgrind/', - 'https://www.mirrorservice.org/sites/sourceware.org/pub/valgrind/', -] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['ad3aec668e813e40f238995f60796d9590eee64a16dff88421430630e69285a2'] - -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-iimpi-2021.eb b/Golden_Repo/v/Valgrind/Valgrind-3.17.0-iimpi-2021.eb deleted file mode 100644 index a4463706c9e9f784780475fc0e74408bd0006d81..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-iimpi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.17.0' - -homepage = 'https://valgrind.org' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2021'} - -source_urls = [ - 'https://sourceware.org/pub/valgrind/', - 'https://www.mirrorservice.org/sites/sourceware.org/pub/valgrind/', -] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['ad3aec668e813e40f238995f60796d9590eee64a16dff88421430630e69285a2'] - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-iompi-2021.eb b/Golden_Repo/v/Valgrind/Valgrind-3.17.0-iompi-2021.eb deleted file mode 100644 index 1889fb8487263d44d40cbc49bacd60eb26165d6e..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-iompi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.17.0' - -homepage = 'https://valgrind.org' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iompi', 'version': '2021'} - -source_urls = [ - 'https://sourceware.org/pub/valgrind/', - 
'https://www.mirrorservice.org/sites/sourceware.org/pub/valgrind/', -] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['ad3aec668e813e40f238995f60796d9590eee64a16dff88421430630e69285a2'] - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-ipsmpi-2021.eb b/Golden_Repo/v/Valgrind/Valgrind-3.17.0-ipsmpi-2021.eb deleted file mode 100644 index 265fb96222282cd1240be4fd8bbe0dc2aad3c077..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Valgrind/Valgrind-3.17.0-ipsmpi-2021.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Valgrind' -version = '3.17.0' - -homepage = 'https://valgrind.org' -description = "Valgrind: Debugging and profiling tools" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} - -source_urls = [ - 'https://sourceware.org/pub/valgrind/', - 'https://www.mirrorservice.org/sites/sourceware.org/pub/valgrind/', -] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['ad3aec668e813e40f238995f60796d9590eee64a16dff88421430630e69285a2'] - -preconfigopts = 'unset CC CFLAGS &&' -configopts = ' --with-mpicc="$MPICC"' - -local_binaries = [ - 'callgrind_annotate', 'callgrind_control', 'cg_annotate', 'cg_diff', - 'cg_merge', 'ms_print', 'valgrind', 'valgrind-listener', 'vgdb' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries] + - ['lib/valgrind/libmpiwrap-amd64-linux.%s' % SHLIB_EXT], - 'dirs': [] -} - -moduleclass = 'debugger' diff --git a/Golden_Repo/v/VirtualGL/VirtualGL-2.6.4-GCCcore-10.3.0.eb b/Golden_Repo/v/VirtualGL/VirtualGL-2.6.4-GCCcore-10.3.0.eb deleted file mode 100644 index 5e585f9de82102a3051982c3ef2f200f3c6bb65f..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VirtualGL/VirtualGL-2.6.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'VirtualGL' -version = '2.6.4' - -homepage = 'https://virtualgl.org/' -description = """VirtualGL is an open source toolkit that gives any Linux or -Unix remote display software the ability to run OpenGL applications with full -hardware acceleration.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/VirtualGL/virtualgl/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['1f9574dc9d315efef9cb04fa3b8d9021943f424bb4402f8823a14453035317c4'] - -patches = [ - 'virtualgl_cmake_lib_path.patch', -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM) -] - -dependencies = [ - ('libjpeg-turbo', '2.0.5'), - ('OpenGL', '2020'), -] - -separate_build_dir = True -configopts = '-DVGL_FAKEOPENCL=OFF' - -local_binaries = [ - 'cpustat', 'glreadtest', 'glxinfo', 'glxspheres64', 'nettest', 'tcbench', - 'vglclient', 'vglconfig', 'vglconnect', 'vglgenkey', 'vgllogin', 'vglrun', - 'vglserver_config' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries], - 'dirs': ['lib64', 'share', 'include'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/v/VirtualGL/VirtualGL-2.6.4-GCCcore-9.3.0.eb 
b/Golden_Repo/v/VirtualGL/VirtualGL-2.6.4-GCCcore-9.3.0.eb deleted file mode 100644 index 7957c18105baaf568e939700a06242e1fa33e33c..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VirtualGL/VirtualGL-2.6.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'VirtualGL' -version = '2.6.4' - -homepage = 'https://virtualgl.org/' -description = """VirtualGL is an open source toolkit that gives any Linux or -Unix remote display software the ability to run OpenGL applications with full -hardware acceleration.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/VirtualGL/virtualgl/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['1f9574dc9d315efef9cb04fa3b8d9021943f424bb4402f8823a14453035317c4'] - -patches = [ - 'virtualgl_cmake_lib_path.patch', -] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0') -] - -dependencies = [ - ('libjpeg-turbo', '2.0.5'), - ('OpenGL', '2020'), -] - -separate_build_dir = True -configopts = '-DVGL_FAKEOPENCL=OFF' - -local_binaries = [ - 'cpustat', 'glreadtest', 'glxinfo', 'glxspheres64', 'nettest', 'tcbench', - 'vglclient', 'vglconfig', 'vglconnect', 'vglgenkey', 'vgllogin', 'vglrun', - 'vglserver_config' -] - -sanity_check_paths = { - 'files': ['bin/%s' % x for x in local_binaries], - 'dirs': ['lib64', 'share', 'include'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/v/VirtualGL/virtualgl_cmake_lib_path.patch b/Golden_Repo/v/VirtualGL/virtualgl_cmake_lib_path.patch deleted file mode 100644 index f58d0cbe2a5f5836f79a646c40f4e703c61b46d3..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/VirtualGL/virtualgl_cmake_lib_path.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff -ruN virtualgl-2.5.2/CMakeLists.txt virtualgl-2.5.2.old/CMakeLists.txt ---- virtualgl-2.5.2/CMakeLists.txt 2017-03-03 00:13:45.000000000 +0100 -+++ virtualgl-2.5.2.old/CMakeLists.txt 2017-03-20 16:14:04.908614846 +0100 -@@ -261,9 +261,6 @@ - - else() - --if(CMAKE_SYSTEM_NAME STREQUAL "Linux") -- set(CMAKE_LIBRARY_PATH /usr/lib/${CPU_TYPE}-linux-gnu;/usr/lib${BITS};/usr/lib) --endif() - include(FindX11) - include(FindOpenGL) - diff --git a/Golden_Repo/v/Voro++/Voro++-0.4.6-GCCcore-10.3.0.eb b/Golden_Repo/v/Voro++/Voro++-0.4.6-GCCcore-10.3.0.eb deleted file mode 100644 index 261d9c1b3187c1d16476a1d568ffddc14b8fccc6..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Voro++/Voro++-0.4.6-GCCcore-10.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Voro++' -version = '0.4.6' - -homepage = 'http://math.lbl.gov/voro++/' -description = """Voro++ is a software library for carrying out three-dimensional computations of the Voronoi -tessellation. A distinguishing feature of the Voro++ library is that it carries out cell-based calculations, -computing the Voronoi cell for each particle individually. It is particularly well-suited for applications that -rely on cell-based statistics, where features of Voronoi cells (eg. 
volume, centroid, number of faces) can be used -to analyze a system of particles.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://math.lbl.gov/%(namelower)s/download/dir/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['ef7970071ee2ce3800daa8723649ca069dc4c71cc25f0f7d22552387f3ea437e'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -# Override CXX and CFLAGS variables from Makefile -buildopts = 'CXX="$CXX" CFLAGS="$CXXFLAGS"' - -# Override PREFIX variable from Makefile -installopts = 'PREFIX=%(installdir)s' - -# No configure -skipsteps = ['configure'] - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'lib/libvoro++.a', 'include/%(namelower)s/%(namelower)s.hh'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/v/Voro++/Voro++-0.4.6-GCCcore-9.3.0.eb b/Golden_Repo/v/Voro++/Voro++-0.4.6-GCCcore-9.3.0.eb deleted file mode 100644 index 8288127d7683b0c5abf4f4f759011b4b5f9593fc..0000000000000000000000000000000000000000 --- a/Golden_Repo/v/Voro++/Voro++-0.4.6-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ - -easyblock = 'ConfigureMake' - -name = 'Voro++' -version = '0.4.6' - -homepage = 'http://math.lbl.gov/voro++/' -description = """Voro++ is a software library for carrying out three-dimensional computations of the Voronoi -tessellation. A distinguishing feature of the Voro++ library is that it carries out cell-based calculations, -computing the Voronoi cell for each particle individually. It is particularly well-suited for applications that -rely on cell-based statistics, where features of Voronoi cells (eg. volume, centroid, number of faces) can be used -to analyze a system of particles.""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['http://math.lbl.gov/%(namelower)s/download/dir/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['ef7970071ee2ce3800daa8723649ca069dc4c71cc25f0f7d22552387f3ea437e'] - -builddependencies = [ - ('binutils', '2.34'), -] - -# Override CXX and CFLAGS variables from Makefile -buildopts = 'CXX="$CXX" CFLAGS="$CXXFLAGS"' - -# Override PREFIX variable from Makefile -installopts = 'PREFIX=%(installdir)s' - -# No configure -skipsteps = ['configure'] - -sanity_check_paths = { - 'files': ['bin/%(namelower)s', 'lib/libvoro++.a', 'include/%(namelower)s/%(namelower)s.hh'], - 'dirs': [], -} - -moduleclass = 'math' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-gomkl-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-gomkl-2020.eb deleted file mode 100644 index fe3b58ecd55f8cf2de5416ee27a40a9163469488..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-gomkl-2020.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 
FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-gomkl-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-gomkl-2021.eb deleted file mode 100644 index 8791dd185eca45fb52df1930dfdb860cfa838e63..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-gomkl-2021.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-gpsmkl-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-gpsmkl-2020.eb deleted file mode 100644 index 2dfce9a9ba887cc94907a1bf0f8dd9779a87b17d..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-gpsmkl-2020.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-gpsmkl-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-gpsmkl-2021.eb deleted file mode 100644 index 48cab592738ba5831b9d9b797f7dc226dbc95333..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-gpsmkl-2021.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' 
-version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-2020.eb deleted file mode 100644 index c377e147dc2fef34a55010886fcaaee4c7dad2f9..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-2020.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-2021.eb deleted file mode 100644 index 828475aa905983b411a4040d7229e2216ae3c1e8..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-2021.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" 
LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-para-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-para-2020.eb deleted file mode 100644 index 7341c252a8c229d9eac17ff987dc35c19c5c98a7..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-para-2020.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-para-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-para-2021.eb deleted file mode 100644 index 2db072fc75c0ac361d8e8048b4629a27f325451f..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-intel-para-2021.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-iomkl-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-iomkl-2020.eb deleted file mode 100644 index bcda6b5dc987b8716de77e9aa330c47cd13485ef..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-iomkl-2020.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - 
-homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-1.2-iomkl-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-1.2-iomkl-2021.eb deleted file mode 100644 index d63ff10430a841e4a1144d9758754653df509cba..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-1.2-iomkl-2021.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '1.2' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'usempi': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.wannier.org/code'] - -patches = ['Wannier90_1x_ignore_makesys.patch'] - -checksums = [ - # wannier90-1.2.tar.gz - 'a76f88eef01c5a40aaa2c74ee393ede8a57bd9085f6b7f2ab656b50c1a30ece4', - # Wannier90_1x_ignore_makesys.patch - '8d4c60cfba6722b7ddc0fad8f0d0e4028990162dca5ff5ffa894e7b11ca21a33', -] - -prebuildopts = 'F90=$F90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' - -# build program and library -buildopts = 'all' - -files_to_copy = [(['wannier90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -# parallel build tends to fail -parallel = 1 - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-2.0.1.1-gpsmkl-2020-abinit.eb b/Golden_Repo/w/Wannier90/Wannier90-2.0.1.1-gpsmkl-2020-abinit.eb deleted file mode 100644 index 378e93faa5bb029c370a9762395a61d6a40e6710..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-2.0.1.1-gpsmkl-2020-abinit.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -# patched version of Wannier90 2.0.1 for ABINIT 8.10.x -name = 'Wannier90' -version = '2.0.1.1' -versionsuffix = '-abinit' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles (s.achilles@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['http://forge.abinit.org/fallbacks'] -sources = [{ - 'download_filename': SOURCELOWER_TAR_GZ, - 'filename': '%(namelower)s-%(version)s%(versionsuffix)s.tar.gz', -}] -checksums = ['bf204369d7a6bc57e2bbfb6f9640419b95e5c1cf5fa9446648e260dacf8e30b0'] - -configopts = ' --with-linalg-libs="-L$EBROOTIMKL/lib/intel64 
-Wl,--start-group' -configopts += ' -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -Wl,--end-group -lpthread -lm -ldl" ' - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier90.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-2.0.1.1-intel-para-2020-abinit.eb b/Golden_Repo/w/Wannier90/Wannier90-2.0.1.1-intel-para-2020-abinit.eb deleted file mode 100644 index b35512e3e63c3c1941b39974387217305b73cbec..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-2.0.1.1-intel-para-2020-abinit.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -# patched version of Wannier90 2.0.1 for ABINIT 8.10.x -name = 'Wannier90' -version = '2.0.1.1' -versionsuffix = '-abinit' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['http://forge.abinit.org/fallbacks'] -sources = [{ - 'download_filename': SOURCELOWER_TAR_GZ, - 'filename': '%(namelower)s-%(version)s%(versionsuffix)s.tar.gz', -}] -checksums = ['bf204369d7a6bc57e2bbfb6f9640419b95e5c1cf5fa9446648e260dacf8e30b0'] - -configopts = ' --with-linalg-libs="-L$EBROOTIMKL/lib/intel64 -Wl,--start-group' -configopts += ' -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -Wl,--end-group -lpthread -lm -ldl" ' - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier90.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gomkl-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gomkl-2020.eb deleted file mode 100644 index 6b1ddb87637ff8ff7f1e4e776be5bbd60afc6572..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gomkl-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gomkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [{'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', # wannier90-3.1.0.tar.gz - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', # Wannier90_3x_ignore_makeinc.patch -] - -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gomkl-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gomkl-2021.eb deleted file mode 100644 index bf29e0bd59fe33c9c558c403f62b60ccf6f1d8d9..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gomkl-2021.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 
'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - {'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', - # Wannier90_3x_ignore_makeinc.patch - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', -] - -# The -fallow-argument-mismatch allows MPI communication calls to be -# called with arrays of different types at different places in the -# code. This otherwise cause an error in GCC 10.X -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS -fallow-argument-mismatch" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), - (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gpsmkl-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gpsmkl-2020.eb deleted file mode 100644 index 955ab3844e43db3417515778632707acfef08e51..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gpsmkl-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [{'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', # wannier90-3.1.0.tar.gz - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', # Wannier90_3x_ignore_makeinc.patch -] - -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gpsmkl-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gpsmkl-2021.eb deleted file mode 100644 index 73605ae673eb09c12b54686fc5259beb0c5cf6bf..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-gpsmkl-2021.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 
{'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', - # Wannier90_3x_ignore_makeinc.patch - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', -] - -# The -fallow-argument-mismatch allows MPI communication calls to be -# called with arrays of different types at different places in the -# code. This otherwise cause an error in GCC 10.X -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS -fallow-argument-mismatch" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), - (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-2020.eb deleted file mode 100644 index 1213466adf65bed0950c52590671b837f6f58bc2..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [{'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', # wannier90-3.1.0.tar.gz - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', # Wannier90_3x_ignore_makeinc.patch -] - -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-2021.eb deleted file mode 100644 index d29fae82a4f0f860dd448d809d66bfee7c964f41..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-2021.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - {'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', - # Wannier90_3x_ignore_makeinc.patch - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', -] - 
-buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), - (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-para-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-para-2020.eb deleted file mode 100644 index 93b6bcf45d566c79db0de340af4e758cf82eba07..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-para-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [{'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', # wannier90-3.1.0.tar.gz - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', # Wannier90_3x_ignore_makeinc.patch -] - -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-para-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-para-2021.eb deleted file mode 100644 index 5244f64418fe68b1a733aa48b2fb195e9a04e340..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-intel-para-2021.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - {'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', - # Wannier90_3x_ignore_makeinc.patch - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', -] - -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), - (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-iomkl-2020.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-iomkl-2020.eb deleted file mode 
100644 index 101aab8efba103fd4c316b0ff9fc48a862c430fc..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-iomkl-2020.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iomkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [{'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', # wannier90-3.1.0.tar.gz - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', # Wannier90_3x_ignore_makeinc.patch -] - -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-iomkl-2021.eb b/Golden_Repo/w/Wannier90/Wannier90-3.1.0-iomkl-2021.eb deleted file mode 100644 index 45ce136f5a59ae605fd7eb8abd763ee482f03657..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90-3.1.0-iomkl-2021.eb +++ /dev/null @@ -1,38 +0,0 @@ -easyblock = 'MakeCp' - -name = 'Wannier90' -version = '3.1.0' - -homepage = 'http://www.wannier.org' -description = """A tool for obtaining maximally-localised Wannier functions""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'usempi': True} - -github_account = 'wannier-developers' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - {'download_filename': 'v%(version)s.tar.gz', 'filename': SOURCELOWER_TAR_GZ}] -patches = ['Wannier90_3x_ignore_makeinc.patch'] -checksums = [ - # wannier90-3.1.0.tar.gz - '40651a9832eb93dec20a8360dd535262c261c34e13c41b6755fa6915c936b254', - # Wannier90_3x_ignore_makeinc.patch - '561c0d296e0e30b8bb303702cd6e41ded54c153d9b9e6cd9cab73858e5e2945e', -] - -buildopts = 'all F90=$F90 MPIF90=$MPIF90 FCOPTS="$FFLAGS" LDOPTS="$FFLAGS" ' -buildopts += 'LIBDIR="$LAPACK_LIB_DIR" LIBS="$LIBLAPACK" ' -buildopts += 'COMMS=mpi' - -files_to_copy = [(['wannier90.x', 'postw90.x'], 'bin'), - (['libwannier.a'], 'lib')] - -sanity_check_paths = { - 'files': ['bin/wannier90.x', 'bin/postw90.x', 'lib/libwannier.a'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/Wannier90/Wannier90_1x_ignore_makesys.patch b/Golden_Repo/w/Wannier90/Wannier90_1x_ignore_makesys.patch deleted file mode 100644 index 07fb605587093cfd8b88b3a3495434f517b5758d..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90_1x_ignore_makesys.patch +++ /dev/null @@ -1,13 +0,0 @@ -avoid including make.sys, which contains hardcoding settings we don't need/want -author: Miguel Dias Costa (National University of Singapore) ---- src/Makefile.orig 2016-04-19 15:26:27.373047000 +0800 -+++ src/Makefile 2016-04-19 15:26:32.414229150 +0800 -@@ -1,7 +1,7 @@ - # Should be no need to change below this line - # - --include ../make.sys -+#include ../make.sys - 
- OBJS = constants.o io.o utility.o parameters.o hamiltonian.o overlap.o \ - kmesh.o disentangle.o wannierise.o plot.o transport.o diff --git a/Golden_Repo/w/Wannier90/Wannier90_3x_ignore_makeinc.patch b/Golden_Repo/w/Wannier90/Wannier90_3x_ignore_makeinc.patch deleted file mode 100644 index 03ccb352edc33ed8b351513c6f2fce7dd2189b53..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/Wannier90/Wannier90_3x_ignore_makeinc.patch +++ /dev/null @@ -1,32 +0,0 @@ -avoid including make.inc, which contains hardcoding settings we don't need/want -author: J-M Beuken -diff -Nru wannier90-3.0.0.orig/src/Makefile.2 wannier90-3.0.0/src/Makefile.2 ---- wannier90-3.0.0.orig/src/Makefile.2 2019-06-26 22:47:08.067494977 +0200 -+++ wannier90-3.0.0/src/Makefile.2 2019-06-26 22:47:35.313193842 +0200 -@@ -2,7 +2,7 @@ - # Should be no need to change below this line - # - --include ../../make.inc -+#include ../../make.inc - - # Contains definition of OBJS, OBJSLIB, OBJS_POST, LIBRARY, DYNLIBRARY - include ../Makefile.header -diff -Nru wannier90-3.0.0.orig/utility/w90pov/Makefile wannier90-3.0.0/utility/w90pov/Makefile ---- wannier90-3.0.0.orig/utility/w90pov/Makefile 2019-06-26 22:47:08.148494082 +0200 -+++ wannier90-3.0.0/utility/w90pov/Makefile 2019-06-26 23:02:34.442673824 +0200 -@@ -1,4 +1,4 @@ --include ../../make.inc -+#include ../../make.inc - - SRC=src - OBJ=obj -diff -Nru wannier90-3.0.0.orig/utility/w90vdw/Makefile wannier90-3.0.0/utility/w90vdw/Makefile ---- wannier90-3.0.0.orig/utility/w90vdw/Makefile 2019-06-26 22:47:08.153494027 +0200 -+++ wannier90-3.0.0/utility/w90vdw/Makefile 2019-06-26 23:03:01.118385092 +0200 -@@ -1,4 +1,4 @@ --include ../../make.inc -+#include ../../make.inc - - w90vdw.x: w90vdw.f90 - $(F90) $(FCOPTS) $< -o $@ diff --git a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-gpsmkl-2020.eb b/Golden_Repo/w/WannierTools/WannierTools-2.5.1-gpsmkl-2020.eb deleted file mode 100644 index 88191b2a5c3665f773bf0644e14fd66646a39fab..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-gpsmkl-2020.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'WannierTools' -version = '2.5.1' - -homepage = 'http://www.wanniertools.com' -description = """WannierTools is an open source software that studies the physical properties of given tight-binding -model.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/quanshengwu/wannier_tools/archive'] -sources = ['V%(version)s.tar.gz'] -patches = ['WannierTools-2.5.1_fix_Makefile.patch'] -checksums = [ - '22aad8e95f2da07ac2dd892c2ca0438010779e6cc542e30f34ea5c3f580fd72c', # V2.5.1.tar.gz - # WannierTools-2.5.1_fix_Makefile.patch - '1752319cad40fc258c5b9753baa9db4ca1c5a4d931234164d66212c7ba92f7cb', -] - -skipsteps = ['configure'] - -prebuildopts = 'cd src && ' -preinstallopts = prebuildopts - -# use -cpp -fallow-argument-mismatch -ffree-line-length-512 for GNU based compilers -# use -fpp for Intel based compilers -# We use the same Makefile regardless of toolchain due to the Makefile patch -buildopts = '-f Makefile.intel-mpi F90FLAGS="$F90FLAGS -cpp -ffree-line-length-512 -DMPI" ' -installopts = '-f Makefile.intel-mpi PREFIX=%(installdir)s ' - -# No dependecies in the Makefile -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/wt.x'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-gpsmkl-2021.eb 
b/Golden_Repo/w/WannierTools/WannierTools-2.5.1-gpsmkl-2021.eb deleted file mode 100644 index 46d2a2ef598ac3db8c60059041592e4b2d2f5a72..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-gpsmkl-2021.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'WannierTools' -version = '2.5.1' - -homepage = 'http://www.wanniertools.com' -description = """WannierTools is an open source software that studies the physical properties of given tight-binding -model.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/quanshengwu/wannier_tools/archive'] -sources = ['V%(version)s.tar.gz'] -patches = ['WannierTools-2.5.1_fix_Makefile.patch'] -checksums = [ - '22aad8e95f2da07ac2dd892c2ca0438010779e6cc542e30f34ea5c3f580fd72c', # V2.5.1.tar.gz - # WannierTools-2.5.1_fix_Makefile.patch - '1752319cad40fc258c5b9753baa9db4ca1c5a4d931234164d66212c7ba92f7cb', -] - -skipsteps = ['configure'] - -prebuildopts = 'cd src && ' -preinstallopts = prebuildopts - -# use -cpp -fallow-argument-mismatch -ffree-line-length-512 for GNU based compilers -# use -fpp for Intel based compilers -# We use the same Makefile regardless of toolchain due to the Makefile patch -buildopts = '-f Makefile.intel-mpi F90FLAGS="$F90FLAGS -cpp -fallow-argument-mismatch -ffree-line-length-512 -DMPI" ' -installopts = '-f Makefile.intel-mpi PREFIX=%(installdir)s ' - -# No dependecies in the Makefile -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/wt.x'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-2020.eb b/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-2020.eb deleted file mode 100644 index 41722c1a386b379b9e23f010aeea70a3663f9db7..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-2020.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'WannierTools' -version = '2.5.1' - -homepage = 'http://www.wanniertools.com' -description = """WannierTools is an open source software that studies the physical properties of given tight-binding -model.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/quanshengwu/wannier_tools/archive'] -sources = ['V%(version)s.tar.gz'] -patches = ['WannierTools-2.5.1_fix_Makefile.patch'] -checksums = [ - '22aad8e95f2da07ac2dd892c2ca0438010779e6cc542e30f34ea5c3f580fd72c', # V2.5.1.tar.gz - # WannierTools-2.5.1_fix_Makefile.patch - '1752319cad40fc258c5b9753baa9db4ca1c5a4d931234164d66212c7ba92f7cb', -] - -skipsteps = ['configure'] - -prebuildopts = 'cd src && ' -preinstallopts = prebuildopts - -# use -cpp -fallow-argument-mismatch -ffree-line-length-512 for GNU based compilers -# use -fpp for Intel based compilers -# We use the same Makefile regardless of toolchain due to the Makefile patch -buildopts = '-f Makefile.intel-mpi F90FLAGS="$F90FLAGS -fpp -DMPI" ' -installopts = '-f Makefile.intel-mpi PREFIX=%(installdir)s ' - -# No dependecies in the Makefile -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/wt.x'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-2021.eb b/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-2021.eb deleted file mode 100644 index 
2a0340472189e73399575c0c10531fd70b8fe202..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-2021.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'WannierTools' -version = '2.5.1' - -homepage = 'http://www.wanniertools.com' -description = """WannierTools is an open source software that studies the physical properties of given tight-binding -model.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/quanshengwu/wannier_tools/archive'] -sources = ['V%(version)s.tar.gz'] -patches = ['WannierTools-2.5.1_fix_Makefile.patch'] -checksums = [ - '22aad8e95f2da07ac2dd892c2ca0438010779e6cc542e30f34ea5c3f580fd72c', # V2.5.1.tar.gz - # WannierTools-2.5.1_fix_Makefile.patch - '1752319cad40fc258c5b9753baa9db4ca1c5a4d931234164d66212c7ba92f7cb', -] - -skipsteps = ['configure'] - -prebuildopts = 'cd src && ' -preinstallopts = prebuildopts - -# use -cpp -fallow-argument-mismatch -ffree-line-length-512 for GNU based compilers -# use -fpp for Intel based compilers -# We use the same Makefile regardless of toolchain due to the Makefile patch -buildopts = '-f Makefile.intel-mpi F90FLAGS="$F90FLAGS -fpp -DMPI" ' -installopts = '-f Makefile.intel-mpi PREFIX=%(installdir)s ' - -# No dependecies in the Makefile -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/wt.x'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-para-2020.eb b/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-para-2020.eb deleted file mode 100644 index 115042c14ec548d208d1cbdaf9e5378fa57fd019..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-para-2020.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'WannierTools' -version = '2.5.1' - -homepage = 'http://www.wanniertools.com' -description = """WannierTools is an open source software that studies the physical properties of given tight-binding -model.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/quanshengwu/wannier_tools/archive'] -sources = ['V%(version)s.tar.gz'] -patches = ['WannierTools-2.5.1_fix_Makefile.patch'] -checksums = [ - '22aad8e95f2da07ac2dd892c2ca0438010779e6cc542e30f34ea5c3f580fd72c', # V2.5.1.tar.gz - # WannierTools-2.5.1_fix_Makefile.patch - '1752319cad40fc258c5b9753baa9db4ca1c5a4d931234164d66212c7ba92f7cb', -] - -skipsteps = ['configure'] - -prebuildopts = 'cd src && ' -preinstallopts = prebuildopts - -# use -cpp -fallow-argument-mismatch -ffree-line-length-512 for GNU based compilers -# use -fpp for Intel based compilers -# We use the same Makefile regardless of toolchain due to the Makefile patch -buildopts = '-f Makefile.intel-mpi F90FLAGS="$F90FLAGS -fpp -DMPI" ' -installopts = '-f Makefile.intel-mpi PREFIX=%(installdir)s ' - -# No dependecies in the Makefile -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/wt.x'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-para-2021.eb b/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-para-2021.eb deleted file mode 100644 index f8ffeab4907865897a3f1edb13b7343e1d9096ea..0000000000000000000000000000000000000000 --- 
a/Golden_Repo/w/WannierTools/WannierTools-2.5.1-intel-para-2021.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'WannierTools' -version = '2.5.1' - -homepage = 'http://www.wanniertools.com' -description = """WannierTools is an open source software that studies the physical properties of given tight-binding -model.""" - -site_contacts = 'Sebastian Achilles <s.achilles@fz-juelich.de>' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'usempi': True} - -source_urls = ['https://github.com/quanshengwu/wannier_tools/archive'] -sources = ['V%(version)s.tar.gz'] -patches = ['WannierTools-2.5.1_fix_Makefile.patch'] -checksums = [ - '22aad8e95f2da07ac2dd892c2ca0438010779e6cc542e30f34ea5c3f580fd72c', # V2.5.1.tar.gz - # WannierTools-2.5.1_fix_Makefile.patch - '1752319cad40fc258c5b9753baa9db4ca1c5a4d931234164d66212c7ba92f7cb', -] - -skipsteps = ['configure'] - -prebuildopts = 'cd src && ' -preinstallopts = prebuildopts - -# use -cpp -fallow-argument-mismatch -ffree-line-length-512 for GNU based compilers -# use -fpp for Intel based compilers -# We use the same Makefile regardless of toolchain due to the Makefile patch -buildopts = '-f Makefile.intel-mpi F90FLAGS="$F90FLAGS -fpp -DMPI" ' -installopts = '-f Makefile.intel-mpi PREFIX=%(installdir)s ' - -# No dependecies in the Makefile -parallel = 1 - -sanity_check_paths = { - 'files': ['bin/wt.x'], - 'dirs': [] -} - -moduleclass = 'chem' diff --git a/Golden_Repo/w/WannierTools/WannierTools-2.5.1_fix_Makefile.patch b/Golden_Repo/w/WannierTools/WannierTools-2.5.1_fix_Makefile.patch deleted file mode 100644 index 7f94aaab99863e43e81ea503dcd366d21e2884dd..0000000000000000000000000000000000000000 --- a/Golden_Repo/w/WannierTools/WannierTools-2.5.1_fix_Makefile.patch +++ /dev/null @@ -1,55 +0,0 @@ -Patch Makefile to be EB friendly. -We use just one Makefile regardless of toolchain. 
- -Åke Sandgren, 2021-04-27 -diff -ru wannier_tools-2.5.1.orig/src/Makefile.intel-mpi wannier_tools-2.5.1/src/Makefile.intel-mpi ---- wannier_tools-2.5.1.orig/src/Makefile.intel-mpi 2020-03-10 20:15:21.000000000 +0100 -+++ wannier_tools-2.5.1/src/Makefile.intel-mpi 2021-04-27 16:24:07.826585523 +0200 -@@ -11,31 +11,34 @@ - main.o - - # compiler --F90 = mpiifort -fpp -DMPI -+#F90 = mpiifort -fpp -DMPI - --INCLUDE = -I${MKLROOT}/include --WFLAG = -nogen-interface --OFLAG = -O3 -g -traceback -static-intel --FFLAG = $(OFLAG) $(WFLAG) --LFLAG = $(OFLAG) -+#INCLUDE = -I${MKLROOT}/include -+#WFLAG = -nogen-interface -+#OFLAG = -O3 -g -traceback -static-intel -+#FFLAG = $(OFLAG) $(WFLAG) -+#LFLAG = $(OFLAG) - - # blas and lapack libraries - # static linking --LIBS = -Wl,--start-group ${MKLROOT}/lib/intel64/libmkl_intel_lp64.a \ -- ${MKLROOT}/lib/intel64/libmkl_sequential.a \ -- ${MKLROOT}/lib/intel64/libmkl_core.a -Wl,--end-group -lpthread -lm -ldl -+#LIBS = -Wl,--start-group ${MKLROOT}/lib/intel64/libmkl_intel_lp64.a \ -+# ${MKLROOT}/lib/intel64/libmkl_sequential.a \ -+# ${MKLROOT}/lib/intel64/libmkl_core.a -Wl,--end-group -lpthread -lm -ldl - - # dynamic linking - # LIBS = -L/${MKLROOT}/lib/intel64 -lmkl_core -lmkl_sequential -lmkl_intel_lp64 -lpthread - --main : $(OBJ) -- $(F90) $(LFLAG) $(OBJ) -o wt.x $(LIBS) -- cp -f wt.x ../bin -+wt.x : $(OBJ) -+ $(F90) $(LDFLAGS) $(OBJ) -o wt.x $(LIBLAPACK) $(LIBBLAS) - - .SUFFIXES: .o .f90 - - .f90.o : -- $(F90) $(FFLAG) $(INCLUDE) -c $*.f90 -+ $(F90) $(F90FLAGS) -c $*.f90 -+ -+install: wt.x -+ install -d $(PREFIX)/bin -+ install wt.x $(PREFIX)/bin - - clean : - rm -f *.o *.mod *~ wt.x diff --git a/Golden_Repo/x/X11/X11-20200222-GCCcore-10.3.0.eb b/Golden_Repo/x/X11/X11-20200222-GCCcore-10.3.0.eb deleted file mode 100644 index ebbbdac8089c96eff21cfaf30d37fffa50022b80..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/X11/X11-20200222-GCCcore-10.3.0.eb +++ /dev/null @@ -1,217 +0,0 @@ -easyblock = 'Bundle' - -name = 'X11' -version = '20200222' - -homepage = 'https://www.x.org' -description = "The X Window System (X11) is a windowing system for bitmap displays" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - XORG_LIB_SOURCE, - XORG_PROTO_SOURCE, - 'https://xcb.freedesktop.org/dist/', - 'https://xkbcommon.org/download/', - XORG_DATA_SOURCE + '/xkeyboard-config', - XORG_DATA_SOURCE, - 'https://www.x.org/archive/individual/app/', - 'https://www.x.org/archive/individual/font/' -] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), - ('Bison', '3.7.6'), - ('gettext', '0.20.2'), - ('pkg-config', '0.29.2'), - ('intltool', '0.51.0', '-Perl-5.32.0'), -] -dependencies = [ - ('bzip2', '1.0.8'), - ('fontconfig', '2.13.92'), - ('freetype', '2.10.1'), - ('zlib', '1.2.11'), - ('xorg-macros', '1.19.2'), - ('libpciaccess', '0.16'), -] - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCE_TAR_GZ], - 'start_dir': '%(name)s-%(version)s', -} -components = [ - ('libpthread-stubs', '0.4', { # 2017-03-14 - 'checksums': ['50d5686b79019ccea08bcbd7b02fe5a40634abcfd4146b6e75c6420cc170e9d9'], - }), - ('xorgproto', '2019.2', { # 2019-10-17 - 'checksums': ['ebfcfce48b66bec25d5dff0e9510e04053ef78e51a8eabeeee4c00e399226d61'], - }), - ('libXau', '1.0.9', { # 2019-02-10 - 'checksums': ['1f123d8304b082ad63a9e89376400a3b1d4c29e67e3ea07b3f659cccca690eea'], - }), - ('libXdmcp', '1.1.3', { # 2019-03-16 - 'checksums': 
['2ef9653d32e09d1bf1b837d0e0311024979653fe755ad3aaada8db1aa6ea180c'], - }), - ('xcb-proto', '1.14', { # 2020-02-22 - 'checksums': ['1c3fa23d091fb5e4f1e9bf145a902161cec00d260fabf880a7a248b02ab27031'], - }), - ('libxcb', '1.14', { # 2020-02-22 - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['2c7fcddd1da34d9b238c9caeda20d3bd7486456fc50b3cc6567185dbd5b0ad02'], - }), - ('xtrans', '1.4.0', { # 2019-03-16 - 'checksums': ['48ed850ce772fef1b44ca23639b0a57e38884045ed2cbb18ab137ef33ec713f9'], - }), - ('libxkbcommon', '0.8.4', { # 2019-02-22 - 'sources': ['libxkbcommon-%(version)s.tar.xz'], - 'checksums': ['60ddcff932b7fd352752d51a5c4f04f3d0403230a584df9a2e0d5ed87c486c8b'], - 'start_dir': 'libxkbcommon-%(version)s', - }), - ('libX11', '1.6.9', { # 2019-10-09 - 'checksums': ['b8c0930a9b25de15f3d773288cacd5e2f0a4158e194935615c52aeceafd1107b'], - }), - ('libXext', '1.3.4', { # 2019-03-16 - 'checksums': ['8ef0789f282826661ff40a8eef22430378516ac580167da35cc948be9041aac1'], - }), - ('libFS', '1.0.8', { # 2019-03-10 - 'checksums': ['e3da723257f4f4c0c629aec402e0a36fbec66a9418f70d24a159cb0470ec83d2'], - }), - ('libICE', '1.0.10', { # 2019-07-14 - 'checksums': ['1116bc64c772fd127a0d0c0ffa2833479905e3d3d8197740b3abd5f292f22d2d'], - }), - ('libSM', '1.2.3', { # 2018-10-10 - 'checksums': ['1e92408417cb6c6c477a8a6104291001a40b3bb56a4a60608fdd9cd2c5a0f320'], - }), - ('libXScrnSaver', '1.2.3', { # 2018-07-05 - 'checksums': ['4f74e7e412144591d8e0616db27f433cfc9f45aae6669c6c4bb03e6bf9be809a'], - }), - ('libXt', '1.2.0', { # 2019-06-21 - 'checksums': ['d4bee88898fc5e1dc470e361430c72fbc529b9cdbbb6c0ed3affea3a39f97d8d'], - }), - ('libXmu', '1.1.3', { # 2019-03-16 - 'checksums': ['5bd9d4ed1ceaac9ea023d86bf1c1632cd3b172dce4a193a72a94e1d9df87a62e'], - }), - ('libXpm', '3.5.13', { # 2019-12-13 - 'checksums': ['e3dfb0fb8c1f127432f2a498c7856b37ce78a61e8da73f1aab165a73dd97ad00'], - }), - ('libXaw', '1.0.13', { # 2015-05-01 - 'checksums': ['7e74ac3e5f67def549722ff0333d6e6276b8becd9d89615cda011e71238ab694'], - }), - ('libXfixes', '5.0.3', { # 2016-10-04 - 'checksums': ['9ab6c13590658501ce4bd965a8a5d32ba4d8b3bb39a5a5bc9901edffc5666570'], - }), - ('libXcomposite', '0.4.5', { # 2019-03-11 - 'checksums': ['581c7fc0f41a99af38b1c36b9be64bc13ef3f60091cd3f01105bbc7c01617d6c'], - }), - ('libXrender', '0.9.10', { # 2016-10-04 - 'checksums': ['770527cce42500790433df84ec3521e8bf095dfe5079454a92236494ab296adf'], - }), - ('libXcursor', '1.2.0', { # 2019-03-11 - 'checksums': ['ad5b2574fccaa4c3fa67b9874fbed863d29ad230c784e9a08b20692418f6a1f8'], - }), - ('libXdamage', '1.1.5', { # 2019-03-11 - 'checksums': ['630ec53abb8c2d6dac5cd9f06c1f73ffb4a3167f8118fdebd77afd639dbc2019'], - }), - ('libfontenc', '1.1.4', { # 2019-02-20 - 'checksums': ['895ee0986b32fbfcda7f4f25ef6cbacfa760e1690bf59f02085ce0e7d1eebb41'], - }), - ('libXfont', '1.5.4', { # 2017-11-28 - 'checksums': ['59be6eab53f7b0feb6b7933c11d67d076ae2c0fd8921229c703fc7a4e9a80d6e'], - }), - ('libXfont2', '2.0.3', { # 2019-09-14 - 'checksums': ['a4b761a37528353a2b83dba364d7c1fd6aef2d554a1a019815f24f7f8866890e'], - }), - ('libXft', '2.3.3', { # 2019-03-16 - 'checksums': ['3c3cf88b1a96e49a3d87d67d9452d34b6e25e96ae83959b8d0a980935014d701'], - }), - ('libXi', '1.7.10', { # 2019-06-19 - 'checksums': ['b51e106c445a49409f3da877aa2f9129839001b24697d75a54e5c60507e9a5e3'], - }), - ('libXinerama', '1.1.4', { # 2018-07-05 - 'checksums': ['64de45e18cc76b8e703cb09b3c9d28bd16e3d05d5cd99f2d630de2d62c3acc18'], - }), - ('libXrandr', '1.5.2', { # 2019-03-16 - 'checksums': 
['3f10813ab355e7a09f17e147d61b0ce090d898a5ea5b5519acd0ef68675dcf8e'], - }), - ('libXres', '1.2.0', { # 2017-10-11 - 'checksums': ['5b62feee09f276d74054787df030fceb41034de84174abec6d81c591145e043a'], - }), - ('libXtst', '1.2.3', { # 2016-10-04 - 'checksums': ['a0c83acce02d4923018c744662cb28eb0dbbc33b4adc027726879ccf68fbc2c2'], - }), - ('libXv', '1.0.11', { # 2016-10-04 - 'checksums': ['c4112532889b210e21cf05f46f0f2f8354ff7e1b58061e12d7a76c95c0d47bb1'], - }), - ('libXvMC', '1.0.12', { # 2019-09-24 - 'checksums': ['024c9ec4f001f037eeca501ee724c7e51cf287eb69ced8c6126e16e7fa9864b5'], - }), - ('libXxf86dga', '1.1.5', { # 2019-03-16 - 'checksums': ['715e2bf5caf6276f0858eb4b11a1aef1a26beeb40dce2942387339da395bef69'], - }), - ('libXxf86vm', '1.1.4', { # 2015-02-24 - 'checksums': ['5108553c378a25688dcb57dca383664c36e293d60b1505815f67980ba9318a99'], - }), - ('libdmx', '1.1.4', { # 2018-05-14 - 'checksums': ['4d05bd5b248c1f46729fa1536b7a5e4d692567327ad41564c36742fb327af925'], - }), - ('libxkbfile', '1.1.0', { # 2019-03-16 - 'checksums': ['2a92adda3992aa7cbad758ef0b8dfeaedebb49338b772c64ddf369d78c1c51d3'], - }), - ('libxshmfence', '1.3', { # 2018-02-26 - 'checksums': ['7eb3d46ad91bab444f121d475b11b39273142d090f7e9ac43e6a87f4ff5f902c'], - }), - ('xcb-util', '0.4.0', { # 2014-10-15 - 'checksums': ['0ed0934e2ef4ddff53fcc70fc64fb16fe766cd41ee00330312e20a985fd927a7'], - }), - ('xcb-util-image', '0.4.0', { # 2014-10-15 - 'checksums': ['cb2c86190cf6216260b7357a57d9100811bb6f78c24576a3a5bfef6ad3740a42'], - }), - ('xcb-util-keysyms', '0.4.0', { # 2014-10-01 - 'checksums': ['0807cf078fbe38489a41d755095c58239e1b67299f14460dec2ec811e96caa96'], - }), - ('xcb-util-renderutil', '0.3.9', { # 2014-06-13 - 'checksums': ['55eee797e3214fe39d0f3f4d9448cc53cffe06706d108824ea37bb79fcedcad5'], - }), - ('xcb-util-wm', '0.4.1', { # 2014-02-19 - 'checksums': ['038b39c4bdc04a792d62d163ba7908f4bb3373057208c07110be73c1b04b8334'], - }), - ('xcb-util-cursor', '0.1.3', { # 2016-05-12 - 'checksums': ['a322332716a384c94d3cbf98f2d8fe2ce63c2fe7e2b26664b6cea1d411723df8'], - }), - ('xkeyboard-config', '2.28', { # 2019-10-19 - 'checksums': ['4424ffaafdf9f09dea69a317709353c4e2b19f69b2405effadce0bac3bdebdff'], - }), - ('printproto', '1.0.5', { # 2011-01-06 - 'checksums': ['e8b6f405fd865f0ea7a3a2908dfbf06622f57f2f91359ec65d13b955e49843fc'], - }), - ('libXp', '1.0.3', { # 2015-02-21 - 'checksums': ['f6b8cc4ef05d3eafc9ef5fc72819dd412024b4ed60197c0d5914758125817e9c'], - }), - ('xbitmaps', '1.1.2', { # 2018-03-10 - 'checksums': ['27e700e8ee02c43f7206f4eca8f1953ad15236cac95d7a0f08505c3f7d99c265'], - }), - ('xkbcomp', '1.4.2', { # 2018-06-07 - 'checksums': ['962a3d550ad0058c141c2bf16eed6498d0f86d987141f875cb0a5df5696487d7'], - }), - ('font-util', '1.3.1', { - 'checksums': ['34ebb0c9c14e0a392cdd5ea055c92489ad88d55ae148b2f1cfded0f3f63f2b5b'], - }), - ('xauth', '1.0.10', { - 'checksums': ['5196821221d824b9bc278fa6505c595acee1d374518a52217d9b64d3c63dedd0'], - }), -] - -preconfigopts = "if [ ! 
-f configure ]; then ./autogen.sh; fi && " - -sanity_check_paths = { - 'files': ['include/X11/Xlib.h', 'include/X11/Xutil.h'], - 'dirs': ['include/GL', 'include/X11', 'include/X11/extensions', 'lib/pkgconfig', - 'share/pkgconfig', 'share/X11/xkb'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/x/X11/X11-20200222-GCCcore-9.3.0.eb b/Golden_Repo/x/X11/X11-20200222-GCCcore-9.3.0.eb deleted file mode 100644 index 5c83aad285fa670515421ca7de00f4e34724b30e..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/X11/X11-20200222-GCCcore-9.3.0.eb +++ /dev/null @@ -1,217 +0,0 @@ -easyblock = 'Bundle' - -name = 'X11' -version = '20200222' - -homepage = 'https://www.x.org' -description = "The X Window System (X11) is a windowing system for bitmap displays" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [ - XORG_LIB_SOURCE, - XORG_PROTO_SOURCE, - 'https://xcb.freedesktop.org/dist/', - 'https://xkbcommon.org/download/', - XORG_DATA_SOURCE + '/xkeyboard-config', - XORG_DATA_SOURCE, - 'https://www.x.org/archive/individual/app/', - 'https://www.x.org/archive/individual/font/' -] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), - ('Bison', '3.6.4'), - ('gettext', '0.20.2'), - ('pkg-config', '0.29.2'), - ('intltool', '0.51.0', '-Perl-5.32.0'), -] -dependencies = [ - ('bzip2', '1.0.8'), - ('fontconfig', '2.13.92'), - ('freetype', '2.10.1'), - ('zlib', '1.2.11'), - ('xorg-macros', '1.19.2'), - ('libpciaccess', '0.16'), -] - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCE_TAR_GZ], - 'start_dir': '%(name)s-%(version)s', -} -components = [ - ('libpthread-stubs', '0.4', { # 2017-03-14 - 'checksums': ['50d5686b79019ccea08bcbd7b02fe5a40634abcfd4146b6e75c6420cc170e9d9'], - }), - ('xorgproto', '2019.2', { # 2019-10-17 - 'checksums': ['ebfcfce48b66bec25d5dff0e9510e04053ef78e51a8eabeeee4c00e399226d61'], - }), - ('libXau', '1.0.9', { # 2019-02-10 - 'checksums': ['1f123d8304b082ad63a9e89376400a3b1d4c29e67e3ea07b3f659cccca690eea'], - }), - ('libXdmcp', '1.1.3', { # 2019-03-16 - 'checksums': ['2ef9653d32e09d1bf1b837d0e0311024979653fe755ad3aaada8db1aa6ea180c'], - }), - ('xcb-proto', '1.14', { # 2020-02-22 - 'checksums': ['1c3fa23d091fb5e4f1e9bf145a902161cec00d260fabf880a7a248b02ab27031'], - }), - ('libxcb', '1.14', { # 2020-02-22 - 'sources': [SOURCE_TAR_GZ], - 'checksums': ['2c7fcddd1da34d9b238c9caeda20d3bd7486456fc50b3cc6567185dbd5b0ad02'], - }), - ('xtrans', '1.4.0', { # 2019-03-16 - 'checksums': ['48ed850ce772fef1b44ca23639b0a57e38884045ed2cbb18ab137ef33ec713f9'], - }), - ('libxkbcommon', '0.8.4', { # 2019-02-22 - 'sources': ['libxkbcommon-%(version)s.tar.xz'], - 'checksums': ['60ddcff932b7fd352752d51a5c4f04f3d0403230a584df9a2e0d5ed87c486c8b'], - 'start_dir': 'libxkbcommon-%(version)s', - }), - ('libX11', '1.6.9', { # 2019-10-09 - 'checksums': ['b8c0930a9b25de15f3d773288cacd5e2f0a4158e194935615c52aeceafd1107b'], - }), - ('libXext', '1.3.4', { # 2019-03-16 - 'checksums': ['8ef0789f282826661ff40a8eef22430378516ac580167da35cc948be9041aac1'], - }), - ('libFS', '1.0.8', { # 2019-03-10 - 'checksums': ['e3da723257f4f4c0c629aec402e0a36fbec66a9418f70d24a159cb0470ec83d2'], - }), - ('libICE', '1.0.10', { # 2019-07-14 - 'checksums': ['1116bc64c772fd127a0d0c0ffa2833479905e3d3d8197740b3abd5f292f22d2d'], - }), - ('libSM', '1.2.3', { # 2018-10-10 - 'checksums': ['1e92408417cb6c6c477a8a6104291001a40b3bb56a4a60608fdd9cd2c5a0f320'], - }), - ('libXScrnSaver', '1.2.3', { # 2018-07-05 - 'checksums': 
['4f74e7e412144591d8e0616db27f433cfc9f45aae6669c6c4bb03e6bf9be809a'], - }), - ('libXt', '1.2.0', { # 2019-06-21 - 'checksums': ['d4bee88898fc5e1dc470e361430c72fbc529b9cdbbb6c0ed3affea3a39f97d8d'], - }), - ('libXmu', '1.1.3', { # 2019-03-16 - 'checksums': ['5bd9d4ed1ceaac9ea023d86bf1c1632cd3b172dce4a193a72a94e1d9df87a62e'], - }), - ('libXpm', '3.5.13', { # 2019-12-13 - 'checksums': ['e3dfb0fb8c1f127432f2a498c7856b37ce78a61e8da73f1aab165a73dd97ad00'], - }), - ('libXaw', '1.0.13', { # 2015-05-01 - 'checksums': ['7e74ac3e5f67def549722ff0333d6e6276b8becd9d89615cda011e71238ab694'], - }), - ('libXfixes', '5.0.3', { # 2016-10-04 - 'checksums': ['9ab6c13590658501ce4bd965a8a5d32ba4d8b3bb39a5a5bc9901edffc5666570'], - }), - ('libXcomposite', '0.4.5', { # 2019-03-11 - 'checksums': ['581c7fc0f41a99af38b1c36b9be64bc13ef3f60091cd3f01105bbc7c01617d6c'], - }), - ('libXrender', '0.9.10', { # 2016-10-04 - 'checksums': ['770527cce42500790433df84ec3521e8bf095dfe5079454a92236494ab296adf'], - }), - ('libXcursor', '1.2.0', { # 2019-03-11 - 'checksums': ['ad5b2574fccaa4c3fa67b9874fbed863d29ad230c784e9a08b20692418f6a1f8'], - }), - ('libXdamage', '1.1.5', { # 2019-03-11 - 'checksums': ['630ec53abb8c2d6dac5cd9f06c1f73ffb4a3167f8118fdebd77afd639dbc2019'], - }), - ('libfontenc', '1.1.4', { # 2019-02-20 - 'checksums': ['895ee0986b32fbfcda7f4f25ef6cbacfa760e1690bf59f02085ce0e7d1eebb41'], - }), - ('libXfont', '1.5.4', { # 2017-11-28 - 'checksums': ['59be6eab53f7b0feb6b7933c11d67d076ae2c0fd8921229c703fc7a4e9a80d6e'], - }), - ('libXfont2', '2.0.3', { # 2019-09-14 - 'checksums': ['a4b761a37528353a2b83dba364d7c1fd6aef2d554a1a019815f24f7f8866890e'], - }), - ('libXft', '2.3.3', { # 2019-03-16 - 'checksums': ['3c3cf88b1a96e49a3d87d67d9452d34b6e25e96ae83959b8d0a980935014d701'], - }), - ('libXi', '1.7.10', { # 2019-06-19 - 'checksums': ['b51e106c445a49409f3da877aa2f9129839001b24697d75a54e5c60507e9a5e3'], - }), - ('libXinerama', '1.1.4', { # 2018-07-05 - 'checksums': ['64de45e18cc76b8e703cb09b3c9d28bd16e3d05d5cd99f2d630de2d62c3acc18'], - }), - ('libXrandr', '1.5.2', { # 2019-03-16 - 'checksums': ['3f10813ab355e7a09f17e147d61b0ce090d898a5ea5b5519acd0ef68675dcf8e'], - }), - ('libXres', '1.2.0', { # 2017-10-11 - 'checksums': ['5b62feee09f276d74054787df030fceb41034de84174abec6d81c591145e043a'], - }), - ('libXtst', '1.2.3', { # 2016-10-04 - 'checksums': ['a0c83acce02d4923018c744662cb28eb0dbbc33b4adc027726879ccf68fbc2c2'], - }), - ('libXv', '1.0.11', { # 2016-10-04 - 'checksums': ['c4112532889b210e21cf05f46f0f2f8354ff7e1b58061e12d7a76c95c0d47bb1'], - }), - ('libXvMC', '1.0.12', { # 2019-09-24 - 'checksums': ['024c9ec4f001f037eeca501ee724c7e51cf287eb69ced8c6126e16e7fa9864b5'], - }), - ('libXxf86dga', '1.1.5', { # 2019-03-16 - 'checksums': ['715e2bf5caf6276f0858eb4b11a1aef1a26beeb40dce2942387339da395bef69'], - }), - ('libXxf86vm', '1.1.4', { # 2015-02-24 - 'checksums': ['5108553c378a25688dcb57dca383664c36e293d60b1505815f67980ba9318a99'], - }), - ('libdmx', '1.1.4', { # 2018-05-14 - 'checksums': ['4d05bd5b248c1f46729fa1536b7a5e4d692567327ad41564c36742fb327af925'], - }), - ('libxkbfile', '1.1.0', { # 2019-03-16 - 'checksums': ['2a92adda3992aa7cbad758ef0b8dfeaedebb49338b772c64ddf369d78c1c51d3'], - }), - ('libxshmfence', '1.3', { # 2018-02-26 - 'checksums': ['7eb3d46ad91bab444f121d475b11b39273142d090f7e9ac43e6a87f4ff5f902c'], - }), - ('xcb-util', '0.4.0', { # 2014-10-15 - 'checksums': ['0ed0934e2ef4ddff53fcc70fc64fb16fe766cd41ee00330312e20a985fd927a7'], - }), - ('xcb-util-image', '0.4.0', { # 2014-10-15 - 'checksums': 
['cb2c86190cf6216260b7357a57d9100811bb6f78c24576a3a5bfef6ad3740a42'], - }), - ('xcb-util-keysyms', '0.4.0', { # 2014-10-01 - 'checksums': ['0807cf078fbe38489a41d755095c58239e1b67299f14460dec2ec811e96caa96'], - }), - ('xcb-util-renderutil', '0.3.9', { # 2014-06-13 - 'checksums': ['55eee797e3214fe39d0f3f4d9448cc53cffe06706d108824ea37bb79fcedcad5'], - }), - ('xcb-util-wm', '0.4.1', { # 2014-02-19 - 'checksums': ['038b39c4bdc04a792d62d163ba7908f4bb3373057208c07110be73c1b04b8334'], - }), - ('xcb-util-cursor', '0.1.3', { # 2016-05-12 - 'checksums': ['a322332716a384c94d3cbf98f2d8fe2ce63c2fe7e2b26664b6cea1d411723df8'], - }), - ('xkeyboard-config', '2.28', { # 2019-10-19 - 'checksums': ['4424ffaafdf9f09dea69a317709353c4e2b19f69b2405effadce0bac3bdebdff'], - }), - ('printproto', '1.0.5', { # 2011-01-06 - 'checksums': ['e8b6f405fd865f0ea7a3a2908dfbf06622f57f2f91359ec65d13b955e49843fc'], - }), - ('libXp', '1.0.3', { # 2015-02-21 - 'checksums': ['f6b8cc4ef05d3eafc9ef5fc72819dd412024b4ed60197c0d5914758125817e9c'], - }), - ('xbitmaps', '1.1.2', { # 2018-03-10 - 'checksums': ['27e700e8ee02c43f7206f4eca8f1953ad15236cac95d7a0f08505c3f7d99c265'], - }), - ('xkbcomp', '1.4.2', { # 2018-06-07 - 'checksums': ['962a3d550ad0058c141c2bf16eed6498d0f86d987141f875cb0a5df5696487d7'], - }), - ('font-util', '1.3.1', { - 'checksums': ['34ebb0c9c14e0a392cdd5ea055c92489ad88d55ae148b2f1cfded0f3f63f2b5b'], - }), - ('xauth', '1.0.10', { - 'checksums': ['5196821221d824b9bc278fa6505c595acee1d374518a52217d9b64d3c63dedd0'], - }), -] - -preconfigopts = "if [ ! -f configure ]; then ./autogen.sh; fi && " - -sanity_check_paths = { - 'files': ['include/X11/Xlib.h', 'include/X11/Xutil.h'], - 'dirs': ['include/GL', 'include/X11', 'include/X11/extensions', 'lib/pkgconfig', - 'share/pkgconfig', 'share/X11/xkb'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/x/XCFun/XCFun-20190127-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/x/XCFun/XCFun-20190127-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index b0ea6729329e74a97abf5f7380f97f1793923e05..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XCFun/XCFun-20190127-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'XCFun' -version = '20190127' -local_commit = 'a486a3f' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://dftlibs.org/xcfun/' -description = """ XCFun is a library of DFT exchange-correlation (XC) functionals. - It is based on automatic differentiation and can therefore generate arbitrary order - derivatives of these functionals. 
""" - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - -site_contacts = 'j.goebbert@fz-juelich.de' - -source_urls = ['https://github.com/dftlibs/xcfun/archive/'] -sources = [{'download_filename': '%s.tar.gz' % local_commit, 'filename': SOURCE_TAR_GZ}] -checksums = ['afde6dbbc8c2167e986a9a3d41716e7e2f0981d7ede8106065c6309588460972'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM) -] - -dependencies = [ - ('Python', '3.8.5'), -] - -separate_build_dir = True - -sanity_check_paths = { - 'files': ['lib/libxcfun.a', 'lib/libxcfun.%s' % SHLIB_EXT], - 'dirs': ['include/XCFun'] -} - -modextrapaths = {'CPATH': 'include/XCFun'} - -moduleclass = 'tools' diff --git a/Golden_Repo/x/XCFun/XCFun-20190127-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb b/Golden_Repo/x/XCFun/XCFun-20190127-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb deleted file mode 100644 index 88365a887ccd7205de0c4cdb960dea453292f965..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XCFun/XCFun-20190127-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'XCFun' -version = '20190127' -local_commit = 'a486a3f' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'http://dftlibs.org/xcfun/' -description = """ XCFun is a library of DFT exchange-correlation (XC) functionals. - It is based on automatic differentiation and can therefore generate arbitrary order - derivatives of these functionals. """ - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} -toolchainopts = {'pic': True} - -site_contacts = 'j.goebbert@fz-juelich.de' - -source_urls = ['https://github.com/dftlibs/xcfun/archive/'] -sources = [{'download_filename': '%s.tar.gz' % local_commit, 'filename': SOURCE_TAR_GZ}] -checksums = ['afde6dbbc8c2167e986a9a3d41716e7e2f0981d7ede8106065c6309588460972'] - -builddependencies = [ - ('CMake', '3.18.0') -] - -dependencies = [ - ('Python', '3.8.5'), -] - -separate_build_dir = True - -sanity_check_paths = { - 'files': ['lib/libxcfun.a', 'lib/libxcfun.%s' % SHLIB_EXT], - 'dirs': ['include/XCFun'] -} - -modextrapaths = {'CPATH': 'include/XCFun'} - -moduleclass = 'tools' diff --git a/Golden_Repo/x/XGBoost/XGBoost-1.3.3-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb b/Golden_Repo/x/XGBoost/XGBoost-1.3.3-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb deleted file mode 100644 index b7e9d32db392a58b6c06fbfbbdd4c1a95ec28b07..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XGBoost/XGBoost-1.3.3-gcccoremkl-10.3.0-2021.2.0-Python-3.8.5.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'XGBoost' -version = '1.2.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/dmlc/xgboost' -description = """XGBoost is an optimized distributed gradient boosting library designed to be highly efficient, - flexible and portable.""" - -site_contacts = 'a.strube@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '10.3.0-2021.2.0'} -toolchainopts = {'pic': True} - - -source_urls = [PYPI_SOURCE] -sources = [SOURCELOWER_TAR_GZ] - - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('Ninja', '1.10.0'), - ('Ninja-Python', '1.10.0', versionsuffix), -] - -dependencies = [ - ('Python', '3.8.5'), - ('SciPy-Stack', '2021', versionsuffix), -] - -use_pip = True -download_dep_fail = True -sanity_pip_check = True - -moduleclass = 'lib' diff --git a/Golden_Repo/x/XServer/0002-Constant-DPI.patch b/Golden_Repo/x/XServer/0002-Constant-DPI.patch deleted file mode 100644 index 
f91e53d1e4934d615e16b7d975d2a6bb8ddc9238..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XServer/0002-Constant-DPI.patch +++ /dev/null @@ -1,96 +0,0 @@ ---- a/src/dummy.h 2016-12-17 23:02:53.396287041 +0100 -+++ b/src/dummy.h 2016-12-17 23:03:30.319616550 +0100 -@@ -51,6 +51,7 @@ - /* options */ - OptionInfoPtr Options; - Bool swCursor; -+ Bool constantDPI; - /* proc pointer */ - CloseScreenProcPtr CloseScreen; - xf86CursorInfoPtr CursorInfo; ---- a/src/dummy_driver.c 2016-12-14 21:54:20.000000000 +0100 -+++ b/src/dummy_driver.c 2016-12-17 23:04:59.916416126 +0100 -@@ -17,6 +17,12 @@ - /* All drivers using the mi colormap manipulation need this */ - #include "micmap.h" - -+#ifdef RANDR -+#include "randrstr.h" -+#endif -+ -+#include "windowstr.h" -+ - /* identifying atom needed by magnifiers */ - #include <X11/Xatom.h> - #include "property.h" -@@ -115,11 +121,15 @@ - }; - - typedef enum { -- OPTION_SW_CURSOR -+ OPTION_SW_CURSOR, -+ OPTION_CONSTANT_DPI - } DUMMYOpts; - - static const OptionInfoRec DUMMYOptions[] = { - { OPTION_SW_CURSOR, "SWcursor", OPTV_BOOLEAN, {0}, FALSE }, -+#ifdef RANDR -+ { OPTION_CONSTANT_DPI, "ConstantDPI", OPTV_BOOLEAN, {0}, FALSE }, -+#endif - { -1, NULL, OPTV_NONE, {0}, FALSE } - }; - -@@ -359,6 +369,7 @@ - xf86ProcessOptions(pScrn->scrnIndex, pScrn->options, dPtr->Options); - - xf86GetOptValBool(dPtr->Options, OPTION_SW_CURSOR,&dPtr->swCursor); -+ xf86GetOptValBool(dPtr->Options, OPTION_CONSTANT_DPI, &dPtr->constantDPI); - - if (device->videoRam != 0) { - pScrn->videoRam = device->videoRam; -@@ -639,10 +650,45 @@ - return TRUE; - } - -+const char *XDPY_PROPERTY = "dummy-constant-xdpi"; -+const char *YDPY_PROPERTY = "dummy-constant-ydpi"; -+static int get_dpi_value(WindowPtr root, const char *property_name, int default_dpi) -+{ -+ PropertyPtr prop; -+ Atom type_atom = MakeAtom("CARDINAL", 8, TRUE); -+ Atom prop_atom = MakeAtom(property_name, strlen(property_name), FALSE); -+ -+ for (prop = wUserProps(root); prop; prop = prop->next) { -+ if (prop->propertyName == prop_atom && prop->type == type_atom && prop->data) { -+ int v = (int) (*((CARD32 *) prop->data)); -+ if ((v>0) && (v<4096)) { -+ xf86DrvMsg(0, X_INFO, "get_constant_dpi_value() found property \"%s\" with value=%i\n", property_name, (int) v); -+ return (int) v; -+ } -+ break; -+ } -+ } -+ return default_dpi; -+} -+ - /* Mandatory */ - Bool - DUMMYSwitchMode(SWITCH_MODE_ARGS_DECL) - { -+ SCRN_INFO_PTR(arg); -+#ifdef RANDR -+ DUMMYPtr dPtr = DUMMYPTR(pScrn); -+ if (dPtr->constantDPI) { -+ int xDpi = get_dpi_value(pScrn->pScreen->root, XDPY_PROPERTY, pScrn->xDpi); -+ int yDpi = get_dpi_value(pScrn->pScreen->root, YDPY_PROPERTY, pScrn->yDpi); -+ //25.4 mm per inch: (254/10) -+ pScrn->pScreen->mmWidth = mode->HDisplay * 254 / xDpi / 10; -+ pScrn->pScreen->mmHeight = mode->VDisplay * 254 / yDpi / 10; -+ xf86DrvMsg(pScrn->scrnIndex, X_INFO, "mm(dpi %ix%i)=%ix%i\n", xDpi, yDpi, pScrn->pScreen->mmWidth, pScrn->pScreen->mmHeight); -+ RRScreenSizeNotify(pScrn->pScreen); -+ RRTellChanged(pScrn->pScreen); -+ } -+#endif - return TRUE; - } - diff --git a/Golden_Repo/x/XServer/0003-fix-pointer-limits.patch b/Golden_Repo/x/XServer/0003-fix-pointer-limits.patch deleted file mode 100644 index 3dbb6fd179ffde507036c62700c004914acc5cfb..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XServer/0003-fix-pointer-limits.patch +++ /dev/null @@ -1,39 +0,0 @@ ---- xf86-video-dummy-0.3.6/src/dummy_driver.c 2014-11-05 19:24:02.668656601 +0700 -+++ xf86-video-dummy-0.3.6.new/src/dummy_driver.c 2014-11-05 
19:37:53.076061853 +0700 -@@ -55,6 +55,9 @@ - #include <X11/extensions/xf86dgaproto.h> - #endif - -+/* Needed for fixing pointer limits on resize */ -+#include "inputstr.h" -+ - /* Mandatory functions */ - static const OptionInfoRec * DUMMYAvailableOptions(int chipid, int busid); - static void DUMMYIdentify(int flags); -@@ -713,6 +716,26 @@ - RRTellChanged(pScrn->pScreen); - } - #endif -+ //ensure the screen dimensions are also updated: -+ pScrn->pScreen->width = mode->HDisplay; -+ pScrn->pScreen->height = mode->VDisplay; -+ pScrn->virtualX = mode->HDisplay; -+ pScrn->virtualY = mode->VDisplay; -+ pScrn->frameX1 = mode->HDisplay; -+ pScrn->frameY1 = mode->VDisplay; -+ -+ //ensure the pointer uses the new limits too: -+ DeviceIntPtr pDev; -+ SpritePtr pSprite; -+ for (pDev = inputInfo.devices; pDev; pDev = pDev->next) { -+ if (pDev->spriteInfo!=NULL && pDev->spriteInfo->sprite!=NULL) { -+ pSprite = pDev->spriteInfo->sprite; -+ pSprite->hotLimits.x2 = mode->HDisplay; -+ pSprite->hotLimits.y2 = mode->VDisplay; -+ pSprite->physLimits.x2 = mode->HDisplay; -+ pSprite->physLimits.y2 = mode->VDisplay; -+ } -+ } - return TRUE; - } - diff --git a/Golden_Repo/x/XServer/0005-support-for-30-bit-depth-in-dummy-driver.patch b/Golden_Repo/x/XServer/0005-support-for-30-bit-depth-in-dummy-driver.patch deleted file mode 100644 index 567db3fc38653bb21812e43a503ed233b663757e..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XServer/0005-support-for-30-bit-depth-in-dummy-driver.patch +++ /dev/null @@ -1,41 +0,0 @@ ---- a/src/dummy.h 2016-12-17 23:33:33.279533389 +0100 -+++ b/src/dummy.h 2016-12-17 23:33:56.695739166 +0100 -@@ -69,7 +69,7 @@ - int overlay_offset; - int videoKey; - int interlace; -- dummy_colors colors[256]; -+ dummy_colors colors[1024]; - pointer* FBBase; - Bool (*CreateWindow)() ; /* wrapped CreateWindow */ - Bool prop; ---- a/src/dummy_driver.c 2016-12-17 23:33:47.446657886 +0100 -+++ b/src/dummy_driver.c 2016-12-17 23:33:56.696739175 +0100 -@@ -317,6 +317,7 @@ - case 15: - case 16: - case 24: -+ case 30: - break; - default: - xf86DrvMsg(pScrn->scrnIndex, X_ERROR, -@@ -331,8 +332,8 @@ - pScrn->rgbBits = 8; - - /* Get the depth24 pixmap format */ -- if (pScrn->depth == 24 && pix24bpp == 0) -- pix24bpp = xf86GetBppFromDepth(pScrn, 24); -+ if (pScrn->depth >= 24 && pix24bpp == 0) -+ pix24bpp = xf86GetBppFromDepth(pScrn, pScrn->depth); - - /* - * This must happen after pScrn->display has been set because -@@ -623,7 +624,7 @@ - if(!miCreateDefColormap(pScreen)) - return FALSE; - -- if (!xf86HandleColormaps(pScreen, 256, pScrn->rgbBits, -+ if (!xf86HandleColormaps(pScreen, 1024, pScrn->rgbBits, - DUMMYLoadPalette, NULL, - CMAP_PALETTED_TRUECOLOR - | CMAP_RELOAD_ON_MODE_SWITCH)) diff --git a/Golden_Repo/x/XServer/XServer-1.20.9-GCCcore-10.3.0.eb b/Golden_Repo/x/XServer/XServer-1.20.9-GCCcore-10.3.0.eb deleted file mode 100644 index 26f193b6f071f2261d4b6b5a8d3a7e5705e30cb1..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XServer/XServer-1.20.9-GCCcore-10.3.0.eb +++ /dev/null @@ -1,258 +0,0 @@ -easyblock = 'Bundle' - -name = 'XServer' -version = '1.20.9' - -homepage = 'https://www.x.org' - -description = """ -XServer: X Window System display server. - -This module provides a stripped-down installation with minimal driver support. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = [ - 'https://www.x.org/archive/individual/xserver/', - 'https://www.x.org/archive/individual/driver/' -] - -# OS dependency should be preferred for security reasons -osdependencies = [ - ('openssl-devel', 'libssl-dev', 'libopenssl-devel'), -] - -builddependencies = [ - ('pkg-config', '0.29.2'), - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.36.1'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), -] - -dependencies = [ - ('libdrm', '2.4.106'), - ('OpenGL', '2020'), - ('pixman', '0.40.0'), - ('X11', '20200222'), - ('freetype', '2.10.1'), - ('fontconfig', '2.13.92'), - ('ncurses', '6.2'), - ('libepoxy', '1.5.4'), - ('eudev', '3.2.9'), -] - -local_font_preconfigopts = "export PKG_CONFIG_PATH=%(installdir)s/lib/pkgconfig:$PKG_CONFIG_PATH && " -local_font_preconfigopts += "export PATH=%(installdir)s/bin:$PATH && " -local_font_preconfigopts += "export FONTCONFIG_FILE=%(installdir)s/config/fontconfig/fonts.conf && " - -# https://github.com/freedesktop/xorg-xserver/blob/master/meson_options.txt -local_xorg_configopts = "-D default_font_path=%(installdir)s/share/fonts/X11 " -local_xorg_configopts += "-D xorg=true " -local_xorg_configopts += "-D xvfb=true " -local_xorg_configopts += "-D xnest=true " -local_xorg_configopts += "-D xephyr=true " -local_xorg_configopts += "-D dmx=true " -local_xorg_configopts += "-D udev=true " -local_xorg_configopts += "-D glamor=true " -local_xorg_configopts += "-D systemd_logind=false " -# local_xorg_configopts += "-D suid_wrapper=true " -local_xorg_configopts += "-D xkb_dir=%(installdir)s/share/X11/xkb " -# local_xorg_configopts += "-D xkb_output_dir=/var/lib/xkb " - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCE_TAR_GZ], - 'start_dir': '%(name)s-%(version)s', -} - -components = [ - ('fontconfig-config', '1.0.0', { - 'easyblock': 'Binary', - 'source_urls': ['https://gitlab.version.fz-juelich.de/goebbert1/fontconfig-config/-/archive/v%(version)s/'], - 'sources': ['%(name)s-v%(version)s.tar.gz'], - 'start_dir': '%(name)s-v%(version)s', - 'extract_sources': True, - 'install_cmd': ( - 'cp -a %(builddir)s/%(name)s-v%(version)s/* %(installdir)s/ && ' - 'sed -i \'s@$EBROOTXSERVER@\'"%(installdir)s"\'@g\' %(installdir)s/share/X11/xorg.conf.d/99-fonts.conf' - ), - 'checksums': [('sha256', '68544c183d153f34105fa08573174650bfe643a6d750bd9da4accac399d375db')], - # to activate this fontconfig you need to export FONTCONFIG_FILE=${EBROOTXSERVER}/config/fontconfig/fonts.conf - }), - ('mkfontscale', '1.2.1', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['e5b687029e44d0bd3ccd254a4da6a5cbfc40350aa8b43fcca16ef6e9b9bb9f22'], - }), - ('mkfontdir', '1.0.7', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['bccc5fb7af1b614eabe4a22766758c87bfc36d66191d08c19d2fa97674b7b5b7'], - }), - ('bdftopcf', '1.1', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['699d1a62012035b1461c7f8e3f05a51c8bd6f28f348983249fb89bbff7309b47'], - }), - ('font-util', '1.3.2', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'f115a3735604de1e852a4bf669be0269d8ce8f21f8e0e74ec5934b31dadc1e76')], - }), - ('encodings', '1.0.5', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': 
['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '66f524ab53acdd0823265e1b1a894f8652c928ae75a18b39aafd6a2d4a5577b0')], - }), - ('font-alias', '1.0.4', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '49525fa6f2c3f3b54f461b2e0649b0ac61af50c36bf40069355a25ced8ce2028')], - }), - ('dejavu', '2.37', { - 'easyblock': 'Binary', - 'source_urls': [SOURCEFORGE_SOURCE], - 'sources': ['%(name)s-fonts-ttf-%(version)s.tar.bz2'], - 'extract_sources': True, - 'start_dir': 'dejavu-fonts-ttf-2.37', - 'install_cmd': ('install -v -d -m755 %(installdir)s/share/fonts/dejavu && ' - 'install -v -m644 ttf/*.ttf %(installdir)s/share/fonts/dejavu'), - 'checksums': [('sha256', 'fa9ca4d13871dd122f61258a80d01751d603b4d3ee14095d65453b4e846e17d7')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-adobe-75dpi', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '61eb1fcfec89f7435cb92cd68712fbe4ba412ca562b1f5feec1f6daa1b8544f6')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-adobe-100dpi', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '97d9c1e706938838e4134d74f0836ae9d9ca6705ecb405c9a0ac8fdcbd9c2159')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-cursor-misc', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'a0b146139363dd0a704c7265ff9cd9150d4ae7c0d248091a9a42093e1618c427')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-adobe-utopia-type1', '1.0.4', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'd9e86a8805b0fb78222409169d839a8531a1f5c7284ee117ff2a0af2e5016c3f')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-misc-misc', '1.1.2', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '46142c876e176036c61c0c24c0a689079704d5ca5b510d48c025861ee2dbf829')], - 'preconfigopts': local_font_preconfigopts, - }), - # Fails because it tries to create a directory outside of %(installdir)s - # => test -z "<PKG_CONFIG-Fontconfig>/etc/fonts/conf.avail" || - # /usr/bin/mkdir -p "<PKG_CONFIG-Fontconfig>/etc/fonts/conf.avail" - # ('font-bh-ttf', '1.0.3', { - # 'source_urls': ['https://www.x.org/pub/individual/font/'], - # 'sources': ['%(name)s-%(version)s.tar.gz'], - # 'checksums': [('sha256', 'c583b4b968ffae6ea30d5b74041afeac83126682c490a9624b770d60d0e63d59')], - # 'preconfigopts': local_font_preconfigopts, - # }), - ('font-bh-type1', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'd5602f1d749ccd31d3bc1bb6f0c5d77400de0e5e3ac5abebd2a867aa2a4081a4')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-ibm-type1', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '4509703e9e581061309cf4823bffd4a93f10f48fe192a1d8be1f183fd6ab9711')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-misc-ethiopic', '1.0.4', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 
'checksums': [('sha256', 'f7901250fb746815065cfe13a814d92260348fede28d61dcab0d05c5d8eafd54')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-xfree86-type1', '1.0.4', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '02b3839ae79ba6a7750525bb3b0c281305664b95bf63b4a0baa230a277b4f928')], - 'preconfigopts': local_font_preconfigopts, - }), - ('xkbcomp', '1.4.4', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['159fba6b62ef4a3fb16ef7fc4eb4fc26f3888652471ceb604c495783dda020bc'], - }), - ('xkeyboard-config', '2.31', { - 'source_urls': ['https://www.x.org/archive//individual/data/xkeyboard-config/'], - 'checksums': ['5ac6b5b661aeb9d0ea84ad961cbbdd8fdf2503d6e8ca65ca1b1c5d9aea2ddc52'], - 'configopts': '--with-xkb-rules-symlink=xorg', - }), - ('xauth', '1.1', { - 'source_urls': ['https://www.x.org/releases/individual/app/'], - 'checksums': ['e9fce796c8c5c9368594b9e8bbba237fb54b6615f5fd60e8d0a5b3c52a92c5ef'], - }), - ('xorg-server', version, { - 'easyblock': 'MesonNinja', - 'patches': [('xvfb-run', '.')], - 'checksums': [ - ('sha256', '067c348fe1a86a1924010354c1c7cf1eaa9e43866e48540aa56a465f2a341ddc'), - ('sha256', 'fd6d13182b77871d4f65fccdaebb8a72387a726426066d3f8e6aa26b010ea0e8'), - ], - 'configopts': local_xorg_configopts, - }), - ('xf86-video-dummy', '0.3.8', { - 'preconfigopts': 'PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib/pkgconfig', - 'checksums': [('sha256', 'ee5ad51e80c8cc90d4c76ac3dec2269a3c769f4232ed418b29d60d618074631b')], - 'patches': [ - '0002-Constant-DPI.patch', - '0003-fix-pointer-limits.patch', - '0005-support-for-30-bit-depth-in-dummy-driver.patch' - ], - }), - ('xterm', '362', { - 'source_urls': ['http://invisible-mirror.net/archives/xterm/'], - 'sources': ['%(name)s-%(version)s.tgz'], - 'checksums': [('sha256', '1d4ffe226fa8f021859bbc3007788ff63a46a31242d9bd9a7bd7ebe24e81aca2')], - 'patches': ['xterm-cursesloc.patch'], - 'configopts': " --with-app-defaults=%(installdir)s/app-defaults ", - }), -] - -# we need to set the permissions our self to ensure no-one messes in this directory -# FIXME: easybuild does not support this in 4.3.0 -> hence you have to do it manually -skipsteps = ['permissions'] -postinstallcmds = [ - 'chmod -R ugo-w %(installdir)s/config', - 'chmod -R ugo-w %(installdir)s/share', - 'cp -a %(builddir)s/xorg-server-%(version)s/xvfb-run %(installdir)s/bin/', - 'chmod u+x %(installdir)s/bin/xvfb-run', -] - -modextrapaths = { - 'XDG_CONFIG_DIRS': 'config', - 'XUSERFILESEARCHPATH': 'app-defaults/%N-%C', -} - -# FONTCONFIG_FILE is used to override the default configuration file -modextravars = {'FONTCONFIG_FILE': '%(installdir)s/config/fontconfig/fonts.conf'} - -sanity_check_paths = { - 'files': ['bin/Xorg', 'bin/Xvfb', 'bin/xvfb-run', - 'lib/xorg/modules/drivers/dummy_drv.la', 'lib/xorg/modules/drivers/dummy_drv.so', - 'bin/xterm'], - 'dirs': [], -} - -sanity_check_commands = [ - "xvfb-run --help", - "xvfb-run --error-file %(builddir)s/xvfb-run-test.err echo hello", -] - -moduleclass = 'vis' diff --git a/Golden_Repo/x/XServer/XServer-1.20.9-GCCcore-9.3.0.eb b/Golden_Repo/x/XServer/XServer-1.20.9-GCCcore-9.3.0.eb deleted file mode 100644 index 17064282b0de7005fcc841dec6da3650fed4c621..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XServer/XServer-1.20.9-GCCcore-9.3.0.eb +++ /dev/null @@ -1,251 +0,0 @@ -easyblock = 'Bundle' - -name = 'XServer' -version = '1.20.9' - -homepage = 'https://www.x.org' - -description = """ 
-XServer: X Window System display server. - -This module provides a stripped-down installation with minimal driver support. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = [ - 'https://www.x.org/archive/individual/xserver/', - 'https://www.x.org/archive/individual/driver/' -] - -# OS dependency should be preferred for security reasons -osdependencies = [ - ('openssl-devel', 'libssl-dev', 'libopenssl-devel'), -] - -builddependencies = [ - ('pkg-config', '0.29.2'), - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.34'), - ('Meson', '0.55.0', '-Python-3.8.5'), - ('Ninja', '1.10.0'), -] - -dependencies = [ - ('libdrm', '2.4.99'), - ('OpenGL', '2020'), - ('pixman', '0.40.0'), - ('X11', '20200222'), - ('freetype', '2.10.1'), - ('fontconfig', '2.13.92'), - ('ncurses', '6.2'), - ('libepoxy', '1.5.4'), - ('eudev', '3.2.9'), -] - -local_font_preconfigopts = "export PKG_CONFIG_PATH=%(installdir)s/lib/pkgconfig:$PKG_CONFIG_PATH && " -local_font_preconfigopts += "export PATH=%(installdir)s/bin:$PATH && " - -# https://github.com/freedesktop/xorg-xserver/blob/master/meson_options.txt -local_xorg_configopts = "-D default_font_path=%(installdir)s/share/fonts/X11 " -local_xorg_configopts += "-D xorg=true " -local_xorg_configopts += "-D xvfb=true " -local_xorg_configopts += "-D xnest=true " -local_xorg_configopts += "-D xephyr=true " -local_xorg_configopts += "-D dmx=true " -local_xorg_configopts += "-D udev=true " -local_xorg_configopts += "-D glamor=true " -local_xorg_configopts += "-D systemd_logind=false " -# local_xorg_configopts += "-D suid_wrapper=true " -local_xorg_configopts += "-D xkb_dir=%(installdir)s/share/X11/xkb " -# local_xorg_configopts += "-D xkb_output_dir=/var/lib/xkb " - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCE_TAR_GZ], - 'start_dir': '%(name)s-%(version)s', -} - -components = [ - ('fontconfig-config', '1.0.0', { - 'easyblock': 'Binary', - 'source_urls': ['https://gitlab.version.fz-juelich.de/goebbert1/fontconfig-config/-/archive/v%(version)s/'], - 'sources': ['%(name)s-v%(version)s.tar.gz'], - 'start_dir': '%(name)s-v%(version)s', - 'extract_sources': True, - 'install_cmd': ( - 'cp -a %(builddir)s/%(name)s-v%(version)s/* %(installdir)s/ && ' - 'sed -i \'s@$EBROOTXSERVER@\'"%(installdir)s"\'@g\' %(installdir)s/share/X11/xorg.conf.d/99-fonts.conf' - ), - 'checksums': [('sha256', '68544c183d153f34105fa08573174650bfe643a6d750bd9da4accac399d375db')], - # to activate this fontconfig you need to export FONTCONFIG_FILE=${EBROOTXSERVER}/config/fontconfig/fonts.conf - }), - ('mkfontscale', '1.2.1', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['e5b687029e44d0bd3ccd254a4da6a5cbfc40350aa8b43fcca16ef6e9b9bb9f22'], - }), - ('mkfontdir', '1.0.7', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['bccc5fb7af1b614eabe4a22766758c87bfc36d66191d08c19d2fa97674b7b5b7'], - }), - ('bdftopcf', '1.1', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['699d1a62012035b1461c7f8e3f05a51c8bd6f28f348983249fb89bbff7309b47'], - }), - ('font-util', '1.3.2', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'f115a3735604de1e852a4bf669be0269d8ce8f21f8e0e74ec5934b31dadc1e76')], - }), - ('encodings', '1.0.5', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 
'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '66f524ab53acdd0823265e1b1a894f8652c928ae75a18b39aafd6a2d4a5577b0')], - }), - ('font-alias', '1.0.4', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '49525fa6f2c3f3b54f461b2e0649b0ac61af50c36bf40069355a25ced8ce2028')], - }), - ('dejavu', '2.37', { - 'easyblock': 'Binary', - 'source_urls': [SOURCEFORGE_SOURCE], - 'sources': ['%(name)s-fonts-ttf-%(version)s.tar.bz2'], - 'extract_sources': True, - 'start_dir': 'dejavu-fonts-ttf-2.37', - 'install_cmd': ('install -v -d -m755 %(installdir)s/share/fonts/dejavu && ' - 'install -v -m644 ttf/*.ttf %(installdir)s/share/fonts/dejavu'), - 'checksums': [('sha256', 'fa9ca4d13871dd122f61258a80d01751d603b4d3ee14095d65453b4e846e17d7')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-adobe-75dpi', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '61eb1fcfec89f7435cb92cd68712fbe4ba412ca562b1f5feec1f6daa1b8544f6')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-adobe-100dpi', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '97d9c1e706938838e4134d74f0836ae9d9ca6705ecb405c9a0ac8fdcbd9c2159')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-cursor-misc', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'a0b146139363dd0a704c7265ff9cd9150d4ae7c0d248091a9a42093e1618c427')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-adobe-utopia-type1', '1.0.4', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'd9e86a8805b0fb78222409169d839a8531a1f5c7284ee117ff2a0af2e5016c3f')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-misc-misc', '1.1.2', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '46142c876e176036c61c0c24c0a689079704d5ca5b510d48c025861ee2dbf829')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-bh-ttf', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'c583b4b968ffae6ea30d5b74041afeac83126682c490a9624b770d60d0e63d59')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-bh-type1', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'd5602f1d749ccd31d3bc1bb6f0c5d77400de0e5e3ac5abebd2a867aa2a4081a4')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-ibm-type1', '1.0.3', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '4509703e9e581061309cf4823bffd4a93f10f48fe192a1d8be1f183fd6ab9711')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-misc-ethiopic', '1.0.4', { - 'source_urls': ['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', 'f7901250fb746815065cfe13a814d92260348fede28d61dcab0d05c5d8eafd54')], - 'preconfigopts': local_font_preconfigopts, - }), - ('font-xfree86-type1', '1.0.4', { - 'source_urls': 
['https://www.x.org/pub/individual/font/'], - 'sources': ['%(name)s-%(version)s.tar.gz'], - 'checksums': [('sha256', '02b3839ae79ba6a7750525bb3b0c281305664b95bf63b4a0baa230a277b4f928')], - 'preconfigopts': local_font_preconfigopts, - }), - ('xkbcomp', '1.4.4', { - 'source_urls': ['https://www.x.org/archive//individual/app/'], - 'checksums': ['159fba6b62ef4a3fb16ef7fc4eb4fc26f3888652471ceb604c495783dda020bc'], - }), - ('xkeyboard-config', '2.31', { - 'source_urls': ['https://www.x.org/archive//individual/data/xkeyboard-config/'], - 'checksums': ['5ac6b5b661aeb9d0ea84ad961cbbdd8fdf2503d6e8ca65ca1b1c5d9aea2ddc52'], - 'configopts': '--with-xkb-rules-symlink=xorg', - }), - ('xauth', '1.1', { - 'source_urls': ['https://www.x.org/releases/individual/app/'], - 'checksums': ['e9fce796c8c5c9368594b9e8bbba237fb54b6615f5fd60e8d0a5b3c52a92c5ef'], - }), - ('xorg-server', version, { - 'easyblock': 'MesonNinja', - 'patches': [('xvfb-run', '.')], - 'checksums': [ - ('sha256', '067c348fe1a86a1924010354c1c7cf1eaa9e43866e48540aa56a465f2a341ddc'), - ('sha256', 'fd6d13182b77871d4f65fccdaebb8a72387a726426066d3f8e6aa26b010ea0e8'), - ], - 'configopts': local_xorg_configopts, - }), - ('xf86-video-dummy', '0.3.8', { - 'preconfigopts': 'PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib/pkgconfig', - 'checksums': [('sha256', 'ee5ad51e80c8cc90d4c76ac3dec2269a3c769f4232ed418b29d60d618074631b')], - 'patches': [ - '0002-Constant-DPI.patch', - '0003-fix-pointer-limits.patch', - '0005-support-for-30-bit-depth-in-dummy-driver.patch' - ], - }), - ('xterm', '362', { - 'source_urls': ['http://invisible-mirror.net/archives/xterm/'], - 'sources': ['%(name)s-%(version)s.tgz'], - 'checksums': [('sha256', '1d4ffe226fa8f021859bbc3007788ff63a46a31242d9bd9a7bd7ebe24e81aca2')], - 'patches': ['xterm-cursesloc.patch'], - 'configopts': " --with-app-defaults=${EBROOTX11}/app-defaults ", - }), -] - -# we need to set the permissions our self to ensure no-one messes in this directory -# FIXME: easybuild does not support this in 4.3.0 -> hence you have to do it manually -skipsteps = ['permissions'] -postinstallcmds = [ - 'chmod -R ugo-w %(installdir)s/config', - 'chmod -R ugo-w %(installdir)s/share', - 'cp -a %(builddir)s/xorg-server-%(version)s/xvfb-run %(installdir)s/bin/', - 'chmod u+x %(installdir)s/bin/xvfb-run', -] - -modextrapaths = {'XDG_CONFIG_DIRS': 'config'} - -# FONTCONFIG_FILE is used to override the default configuration file -modextravars = {'FONTCONFIG_FILE': '%(installdir)s/config/fontconfig/fonts.conf'} - -sanity_check_paths = { - 'files': ['bin/Xorg', 'bin/Xvfb', 'bin/xvfb-run', - 'lib/xorg/modules/drivers/dummy_drv.la', 'lib/xorg/modules/drivers/dummy_drv.so', - 'bin/xterm'], - 'dirs': [], -} - -sanity_check_commands = [ - "xvfb-run --help", - "xvfb-run --error-file %(builddir)s/xvfb-run-test.err echo hello", -] - -moduleclass = 'vis' diff --git a/Golden_Repo/x/XServer/xterm-cursesloc.patch b/Golden_Repo/x/XServer/xterm-cursesloc.patch deleted file mode 100644 index 033c2776b7838ae1907a69f31d75d7ce7d28175f..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XServer/xterm-cursesloc.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -Naur xterm-362.orig/xtermcap.h xterm-362/xtermcap.h ---- xterm-362.orig/xtermcap.h 2013-06-23 17:34:37.000000000 +0200 -+++ xterm-362/xtermcap.h 2021-02-16 14:00:48.976219215 +0100 -@@ -59,7 +59,7 @@ - #undef ERR /* workaround for glibc 2.1.3 */ - - #ifdef HAVE_NCURSES_CURSES_H --#include <ncurses/curses.h> -+#include <curses.h> - #else - #include <curses.h> - #endif diff --git 
a/Golden_Repo/x/XServer/xvfb-run b/Golden_Repo/x/XServer/xvfb-run deleted file mode 100644 index 237e0dfc5938430d50fe8d30fde1b6defa9c6e42..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XServer/xvfb-run +++ /dev/null @@ -1,194 +0,0 @@ -#!/bin/sh - -# extracted from Debian package for xorg-server, -# via http://deb.debian.org/debian/pool/main/x/xorg-server/xorg-server_1.20.4-1.diff.gz - -# This script starts an instance of Xvfb, the "fake" X server, runs a command -# with that server available, and kills the X server when done. The return -# value of the command becomes the return value of this script. -# -# If anyone is using this to build a Debian package, make sure the package -# Build-Depends on xvfb and xauth. - -set -e - -PROGNAME=xvfb-run -SERVERNUM=99 -AUTHFILE= -ERRORFILE=/dev/null -XVFBARGS="-screen 0 1280x1024x24" -LISTENTCP="-nolisten tcp" -XAUTHPROTO=. - -# Query the terminal to establish a default number of columns to use for -# displaying messages to the user. This is used only as a fallback in the event -# the COLUMNS variable is not set. ($COLUMNS can react to SIGWINCH while the -# script is running, and this cannot, only being calculated once.) -DEFCOLUMNS=$(stty size 2>/dev/null | awk '{print $2}') || true -case "$DEFCOLUMNS" in - *[!0-9]*|'') DEFCOLUMNS=80 ;; -esac - -# Display a message, wrapping lines at the terminal width. -message () { - echo "$PROGNAME: $*" | fmt -t -w ${COLUMNS:-$DEFCOLUMNS} -} - -# Display an error message. -error () { - message "error: $*" >&2 -} - -# Display a usage message. -usage () { - if [ -n "$*" ]; then - message "usage error: $*" - fi - cat <<EOF -Usage: $PROGNAME [OPTION ...] COMMAND -Run COMMAND (usually an X client) in a virtual X server environment. -Options: --a --auto-servernum try to get a free server number, starting at - --server-num --e FILE --error-file=FILE file used to store xauth errors and Xvfb - output (default: $ERRORFILE) --f FILE --auth-file=FILE file used to store auth cookie - (default: ./.Xauthority) --h --help display this usage message and exit --n NUM --server-num=NUM server number to use (default: $SERVERNUM) --l --listen-tcp enable TCP port listening in the X server --p PROTO --xauth-protocol=PROTO X authority protocol name to use - (default: xauth command's default) --s ARGS --server-args=ARGS arguments (other than server number and - "-nolisten tcp") to pass to the Xvfb server - (default: "$XVFBARGS") -EOF -} - -# Find a free server number by looking at .X*-lock files in /tmp. -find_free_servernum() { - # Sadly, the "local" keyword is not POSIX. Leave the next line commented in - # the hope Debian Policy eventually changes to allow it in /bin/sh scripts - # anyway. - #local i - - i=$SERVERNUM - while [ -f /tmp/.X$i-lock ]; do - i=$(($i + 1)) - done - echo $i -} - -# Clean up files -clean_up() { - if [ -e "$AUTHFILE" ]; then - XAUTHORITY=$AUTHFILE xauth remove ":$SERVERNUM" >>"$ERRORFILE" 2>&1 - fi - if [ -n "$XVFB_RUN_TMPDIR" ]; then - if ! rm -r "$XVFB_RUN_TMPDIR"; then - error "problem while cleaning up temporary directory" - exit 5 - fi - fi - if [ -n "$XVFBPID" ]; then - kill "$XVFBPID" >>"$ERRORFILE" 2>&1 - fi -} - -# Parse the command line. -ARGS=$(getopt --options +ae:f:hn:lp:s:w: \ - --long auto-servernum,error-file:,auth-file:,help,server-num:,listen-tcp,xauth-protocol:,server-args:,wait: \ - --name "$PROGNAME" -- "$@") -GETOPT_STATUS=$? 
- -if [ $GETOPT_STATUS -ne 0 ]; then - error "internal error; getopt exited with status $GETOPT_STATUS" - exit 6 -fi - -eval set -- "$ARGS" - -while :; do - case "$1" in - -a|--auto-servernum) SERVERNUM=$(find_free_servernum); AUTONUM="yes" ;; - -e|--error-file) ERRORFILE="$2"; shift ;; - -f|--auth-file) AUTHFILE="$2"; shift ;; - -h|--help) SHOWHELP="yes" ;; - -n|--server-num) SERVERNUM="$2"; shift ;; - -l|--listen-tcp) LISTENTCP="" ;; - -p|--xauth-protocol) XAUTHPROTO="$2"; shift ;; - -s|--server-args) XVFBARGS="$2"; shift ;; - -w|--wait) shift ;; - --) shift; break ;; - *) error "internal error; getopt permitted \"$1\" unexpectedly" - exit 6 - ;; - esac - shift -done - -if [ "$SHOWHELP" ]; then - usage - exit 0 -fi - -if [ -z "$*" ]; then - usage "need a command to run" >&2 - exit 2 -fi - -if ! command -v xauth >/dev/null; then - error "xauth command not found" - exit 3 -fi - -# tidy up after ourselves -trap clean_up EXIT - -# If the user did not specify an X authorization file to use, set up a temporary -# directory to house one. -if [ -z "$AUTHFILE" ]; then - XVFB_RUN_TMPDIR="$(mktemp -d -t $PROGNAME.XXXXXX)" - AUTHFILE="$XVFB_RUN_TMPDIR/Xauthority" - # Create empty file to avoid xauth warning - touch "$AUTHFILE" -fi - -# Start Xvfb. -MCOOKIE=$(mcookie) -tries=10 -while [ $tries -gt 0 ]; do - tries=$(( $tries - 1 )) - XAUTHORITY=$AUTHFILE xauth source - << EOF >>"$ERRORFILE" 2>&1 -add :$SERVERNUM $XAUTHPROTO $MCOOKIE -EOF - # handle SIGUSR1 so Xvfb knows to send a signal when it's ready to accept - # connections - trap : USR1 - (trap '' USR1; exec Xvfb ":$SERVERNUM" $XVFBARGS $LISTENTCP -auth $AUTHFILE >>"$ERRORFILE" 2>&1) & - XVFBPID=$! - - wait || : - if kill -0 $XVFBPID 2>/dev/null; then - break - elif [ -n "$AUTONUM" ]; then - # The display is in use so try another one (if '-a' was specified). - SERVERNUM=$((SERVERNUM + 1)) - SERVERNUM=$(find_free_servernum) - continue - fi - error "Xvfb failed to start" >&2 - XVFBPID= - exit 1 -done - -# Start the command and save its exit status. -set +e -DISPLAY=:$SERVERNUM XAUTHORITY=$AUTHFILE "$@" -RETVAL=$? -set -e - -# Return the executed command's exit status. 
-exit $RETVAL - -# vim:set ai et sts=4 sw=4 tw=80: diff --git a/Golden_Repo/x/XZ/XZ-5.2.5-GCCcore-10.3.0.eb b/Golden_Repo/x/XZ/XZ-5.2.5-GCCcore-10.3.0.eb deleted file mode 100644 index 19d7259d86c8abe2f3a6fe57884e4cb3030583dc..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XZ/XZ-5.2.5-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'XZ' -version = '5.2.5' - -homepage = 'https://tukaani.org/xz/' -description = "xz: XZ utilities" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://tukaani.org/xz/'] -sources = [SOURCELOWER_TAR_BZ2] -patches = ['XZ-5.2.2_compat-libs.patch'] -checksums = [ - '5117f930900b341493827d63aa910ff5e011e0b994197c3b71c08a20228a42df', # xz-5.2.5.tar.bz2 - # XZ-5.2.2_compat-libs.patch - '578da3ea2ddb551972891a60fe31478b16a516d6ea8b6aa3af89e1d558adb703', -] - -builddependencies = [ - # use gettext built with system toolchain as build dep to avoid cyclic dependency (XZ -> gettext -> libxml2 -> XZ) - ('gettext', '0.20.2', '', SYSTEM), - ('binutils', '2.36.1'), -] - -# may become useful in non-x86 archs -# configopts = ' --disable-assembler ' - -sanity_check_paths = { - 'files': ['bin/lzmainfo', 'bin/unxz', 'bin/xz'], - 'dirs': [] -} - -sanity_check_commands = [ - "xz --help", - "unxz --help", -] - -moduleclass = 'tools' diff --git a/Golden_Repo/x/XZ/XZ-5.2.5-GCCcore-9.3.0.eb b/Golden_Repo/x/XZ/XZ-5.2.5-GCCcore-9.3.0.eb deleted file mode 100644 index a887ae22eedb09ca8638ca7a361dea74c84706c0..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/XZ/XZ-5.2.5-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'XZ' -version = '5.2.5' - -homepage = 'https://tukaani.org/xz/' -description = "xz: XZ utilities" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://tukaani.org/xz/'] -sources = [SOURCELOWER_TAR_BZ2] -patches = ['XZ-5.2.2_compat-libs.patch'] -checksums = [ - '5117f930900b341493827d63aa910ff5e011e0b994197c3b71c08a20228a42df', # xz-5.2.5.tar.bz2 - '578da3ea2ddb551972891a60fe31478b16a516d6ea8b6aa3af89e1d558adb703', # XZ-5.2.2_compat-libs.patch -] - -builddependencies = [ - # use gettext built with system toolchain as build dep to avoid cyclic dependency (XZ -> gettext -> libxml2 -> XZ) - ('gettext', '0.20.2', '', SYSTEM), - ('binutils', '2.34'), -] - -# may become useful in non-x86 archs -# configopts = ' --disable-assembler ' - -sanity_check_paths = { - 'files': ["bin/xz", "bin/lzmainfo"], - 'dirs': [] -} - -moduleclass = 'tools' diff --git a/Golden_Repo/x/x264/x264-20200912-GCCcore-10.3.0.eb b/Golden_Repo/x/x264/x264-20200912-GCCcore-10.3.0.eb deleted file mode 100644 index 8e22e82e8ab66eaa53acb078dcfdc6febbaef20f..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/x264/x264-20200912-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -# Built with EasyBuild version 4.4.0 on 2021-06-21_11-34-32 -easyblock = 'ConfigureMake' - -name = 'x264' -version = '20200912' - -homepage = 'http://www.videolan.org/developers/x264.html' -description = """x264 is a free software library and application for encoding video streams into the H.264/MPEG-4 - AVC compression format, and is released under the terms of the GNU GPL. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://code.videolan.org/videolan/x264/-/archive/d198931a63049db1f2c92d96c34904c69fde8117/'] -sources = ['x264-d198931a63049db1f2c92d96c34904c69fde8117.tar.gz'] -checksums = ['31dc10bf77dcf7359094c1837359ff151dbc4bcf2536d76729ef9ad88bf7c194'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('NASM', '2.15.03'), -] - -configopts = " --enable-shared --enable-static --bashcompletionsdir=%(installdir)s/share/bash-completion/completions " - -sanity_check_paths = { - 'files': ['bin/x264', 'include/x264_config.h', 'include/x264.h', 'lib/libx264.a', 'lib/libx264.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/x/x264/x264-20200912-GCCcore-9.3.0.eb b/Golden_Repo/x/x264/x264-20200912-GCCcore-9.3.0.eb deleted file mode 100644 index d2e628805895ddd1927ce73778c0ed6a46ffae08..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/x264/x264-20200912-GCCcore-9.3.0.eb +++ /dev/null @@ -1,36 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'x264' -version = '20200912' - -homepage = 'http://www.videolan.org/developers/x264.html' -description = """x264 is a free software library and application for encoding video streams into the H.264/MPEG-4 - AVC compression format, and is released under the terms of the GNU GPL. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://code.videolan.org/videolan/x264/-/archive/d198931a63049db1f2c92d96c34904c69fde8117/'] -sources = ['x264-d198931a63049db1f2c92d96c34904c69fde8117.tar.gz'] -checksums = ['31dc10bf77dcf7359094c1837359ff151dbc4bcf2536d76729ef9ad88bf7c194'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('NASM', '2.15.03'), -] - -configopts = " --enable-shared --enable-static --bashcompletionsdir=%(installdir)s/share/bash-completion/completions " - -sanity_check_paths = { - 'files': ['bin/x264', 'include/x264_config.h', 'include/x264.h', 'lib/libx264.a', 'lib/libx264.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/x/x265/x265-3.4-GCCcore-10.3.0.eb b/Golden_Repo/x/x265/x265-3.4-GCCcore-10.3.0.eb deleted file mode 100644 index ec280bfe2e0df58824a4c9c007f4c9d43a92a38e..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/x265/x265-3.4-GCCcore-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -# Built with EasyBuild version 4.4.0 on 2021-06-21_13-49-26 -easyblock = 'CMakeMake' - -name = 'x265' -version = '3.4' - -homepage = 'https://www.videolan.org/developers/x265.html' -description = """x265 is a free software library and application for encoding video streams - into the H.265/MPEG-H HEVC compression format, and is released under the terms of the GNU GPL. 
-""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/videolan/x265/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['544d147bf146f8994a7bf8521ed878c93067ea1c7c6e93ab602389be3117eaaf'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('NASM', '2.15.03'), -] - -separate_build_dir = True -srcdir = '../x265-%(version)s/source' - -configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -sanity_check_paths = { - 'files': ['bin/x265', 'include/x265_config.h', 'include/x265.h', 'lib/libx265.a', 'lib/libx265.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/x/x265/x265-3.4-GCCcore-9.3.0.eb b/Golden_Repo/x/x265/x265-3.4-GCCcore-9.3.0.eb deleted file mode 100644 index 30a8179083c3e1a57f8651322b2c5b44ca4a6f23..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/x265/x265-3.4-GCCcore-9.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'x265' -version = '3.4' - -homepage = 'https://www.videolan.org/developers/x265.html' -description = """x265 is a free software library and application for encoding video streams - into the H.265/MPEG-H HEVC compression format, and is released under the terms of the GNU GPL. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/videolan/x265/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['544d147bf146f8994a7bf8521ed878c93067ea1c7c6e93ab602389be3117eaaf'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('NASM', '2.15.03'), -] - -separate_build_dir = True -srcdir = '../x265-%(version)s/source' - -configopts = '-DCMAKE_VERBOSE_MAKEFILE=ON ' - -sanity_check_paths = { - 'files': ['bin/x265', 'include/x265_config.h', 'include/x265.h', 'lib/libx265.a', 'lib/libx265.%s' % SHLIB_EXT], - 'dirs': [], -} -moduleclass = 'vis' diff --git a/Golden_Repo/x/xdg-user-dirs/xdg-user-dirs-0.17-GCCcore-10.3.0.eb b/Golden_Repo/x/xdg-user-dirs/xdg-user-dirs-0.17-GCCcore-10.3.0.eb deleted file mode 100644 index a976e4cb99c9a34b151eedb0993ca1b2a02b0605..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/xdg-user-dirs/xdg-user-dirs-0.17-GCCcore-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'xdg-user-dirs' -version = '0.17' - -homepage = 'https://freedesktop.org/wiki/Software/xdg-user-dirs' -description = """ - xdg-user-dirs is a tool to help manage "well known" user directories like the desktop folder - and the music folder. It also handles localization (i.e. translation) of the filenames. 
- """ - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['http://user-dirs.freedesktop.org/releases/'] -sources = ['xdg-user-dirs-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -configopts = "--disable-documentation " - -sanity_check_paths = { - 'files': ['bin/xdg-user-dir'], - 'dirs': ['bin'] -} - - -moduleclass = 'vis' diff --git a/Golden_Repo/x/xorg-macros/xorg-macros-1.19.2-GCCcore-10.3.0.eb b/Golden_Repo/x/xorg-macros/xorg-macros-1.19.2-GCCcore-10.3.0.eb deleted file mode 100644 index ad114d31c74e468607aeac783e76e1c4f88a6434..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/xorg-macros/xorg-macros-1.19.2-GCCcore-10.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -## -# This is a contribution from DeepThought HPC Service, Flinders University, Adelaide, Australia -# Homepage: https://staff.flinders.edu.au/research/deep-thought -# -# Authors:: Robert Qiao <rob.qiao@flinders.edu.au> -# License:: Custom -# -# Notes:: -## - -easyblock = 'ConfigureMake' - -name = 'xorg-macros' -version = '1.19.2' - -homepage = 'https://cgit.freedesktop.org/xorg/util/macros' -description = """X.org macros utilities.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://gitlab.freedesktop.org/xorg/util/macros/-/archive/util-macros-%(version)s'] -sources = ['macros-util-macros-%(version)s.tar.gz'] -checksums = ['326e51a5d673e3d9cc7f139aee469a11e18ea060b1d06c22694612e68a2089b1'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('Autotools', '20200321'), -] - -preconfigopts = './autogen.sh && ' - -sanity_check_paths = { - 'files': ['share/pkgconfig/xorg-macros.pc'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/x/xorg-macros/xorg-macros-1.19.2-GCCcore-9.3.0.eb b/Golden_Repo/x/xorg-macros/xorg-macros-1.19.2-GCCcore-9.3.0.eb deleted file mode 100644 index 5117c93c5eb9089849bd06d3a23fc6a2e9f2ad0e..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/xorg-macros/xorg-macros-1.19.2-GCCcore-9.3.0.eb +++ /dev/null @@ -1,39 +0,0 @@ -## -# This is a contribution from DeepThought HPC Service, Flinders University, Adelaide, Australia -# Homepage: https://staff.flinders.edu.au/research/deep-thought -# -# Authors:: Robert Qiao <rob.qiao@flinders.edu.au> -# License:: Custom -# -# Notes:: -## - -easyblock = 'ConfigureMake' - -name = 'xorg-macros' -version = '1.19.2' - -homepage = 'https://cgit.freedesktop.org/xorg/util/macros' -description = """X.org macros utilities.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://gitlab.freedesktop.org/xorg/util/macros/-/archive/util-macros-%(version)s'] -sources = ['macros-util-macros-%(version)s.tar.gz'] -checksums = ['326e51a5d673e3d9cc7f139aee469a11e18ea060b1d06c22694612e68a2089b1'] - -builddependencies = [ - ('binutils', '2.34'), - ('Autotools', '20200321'), -] - -preconfigopts = './autogen.sh && ' - -sanity_check_paths = { - 'files': ['share/pkgconfig/xorg-macros.pc'], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/x/xpra/xpra-4.0.4-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/x/xpra/xpra-4.0.4-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index ad24c053a526e2ce17b93934b9fe0f6df44a8d49..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/xpra/xpra-4.0.4-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,111 +0,0 @@ -easyblock = 'Bundle' - -name = 'xpra' -version = '4.0.4' 
-versionsuffix = '-Python-%(pyver)s' - -homepage = "http://www.xpra.org" -description = """Xpra is an open-source multi-platform persistent remote display server and client -for forwarding applications and desktop screens. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('X11', '20200222'), - ('SciPy-Stack', '2021', versionsuffix, ('gcccoremkl', '10.3.0-2021.2.0')), - ('PyCairo', '1.18.2', '-Python-%(pyver)s'), - ('PyGObject', '3.34.0', '-Python-%(pyver)s'), - ('GTK+', '3.24.17'), - ('rencode', '1.0.5', '-Python-%(pyver)s'), - ('lz4', '3.1.0', '-Python-%(pyver)s'), - ('yuicompressor', '2.4.8', '-Python-%(pyver)s'), - ('x264', '20200912'), - ('x265', '3.4'), - ('libvpx', '1.9.0'), - ('FFmpeg', '4.4'), - ('libwebp', '1.1.0'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('zlib', '1.2.11'), - ('LibTIFF', '4.1.0'), - ('freetype', '2.10.1'), - ('libyuv', '20201024'), - ('uglifyjs', '3.11.4'), - ('DBus', '1.12.20'), - ('XServer', '1.20.9'), -] - -components = [ - ('xpra-html5', '4.0.4', { - 'easyblock': 'CmdCp', - 'source_urls': ['http://xpra.org/src/'], - 'sources': ['%(name)s-%(version)s.tar.bz2'], - 'checksums': [('sha256', 'c294f80016a2cbf8878673cba7884ac2ffcaf5495141975f65eb4e93a08623db')], - 'cmds_map': [('.*', 'tar -xjf %(source)s')], - 'files_to_copy': ['%(name)s-%(version)s/*'], - }), -] - -prebuildopts = 'export CFLAGS="-Wno-error=unused-function" && ' - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'use_pip': True, - 'source_urls': [PYPI_SOURCE], - 'sanity_pip_check': True, - 'use_pip_for_deps': False, - 'download_dep_fail': True, -} - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_list = [ - ('pyinotify', '0.9.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9c998a5d7606ca835065cdabc013ae6c66eb9ea76a00a1e3bc6e0cfe2b4f71f4')]), - ])), - ('dbus-python', '1.2.16', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '11238f1d86c995d8aed2e22f04a1e3779f0d70e587caffeab4857f3c662ed5a4')]), - ('modulename', 'dbus'), - ])), - ('pyxdg', '0.27', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '80bd93aae5ed82435f20462ea0208fb198d8eec262e831ee06ce9ddb6b91c5a5')]), - ('modulename', 'xdg'), - ])), - ('python-uinput', '0.11.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '99392b676c77b5795b86b7d75274db33fe754fd1e06fb3d58b167c797dc47f0c')]), - ('modulename', 'uinput'), - ])), - ('xpra', '4.0.4', dict(list(local_common_opts.items()) + [ - ('source_urls', ['http://xpra.org/src/']), - ('source_tmpl', '%(name)s-%(version)s.tar.bz2'), - ('checksums', [('sha256', 'a443282225d2498fe372ebc01bebf884fdecaf4d181a7053be420424a2b6dde7')]), - ('patches', ['xpra-4.0.4-use_Xorg_on_PATH_first.patch']), - ('use_pip', False), - ('installopts', '--with-tests --without-service'), - ])), -] - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages', 'lib64/python%(pyshortver)s/site-packages'], -} - -sanity_check_paths = { - 'files': ['bin/xpra'], - 'dirs': ['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/x/xpra/xpra-4.0.4-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/x/xpra/xpra-4.0.4-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 
e691b0edffb7e4623457dc7337e415c649834d6f..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/xpra/xpra-4.0.4-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,111 +0,0 @@ -easyblock = 'Bundle' - -name = 'xpra' -version = '4.0.4' -versionsuffix = '-Python-%(pyver)s' - -homepage = "http://www.xpra.org" -description = """Xpra is an open-source multi-platform persistent remote display server and client -for forwarding applications and desktop screens. -""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('X11', '20200222'), - ('SciPy-Stack', '2020', '-Python-%(pyver)s', ('gcccoremkl', '9.3.0-2020.2.254')), - ('PyCairo', '1.18.2', '-Python-%(pyver)s'), - ('PyGObject', '3.34.0', '-Python-%(pyver)s'), - ('GTK+', '3.24.17'), - ('rencode', '1.0.5', '-Python-%(pyver)s'), - ('lz4', '3.1.0', '-Python-%(pyver)s'), - ('yuicompressor', '2.4.8', '-Python-%(pyver)s'), - ('x264', '20200912'), - ('x265', '3.4'), - ('libvpx', '1.9.0'), - ('FFmpeg', '4.3.1'), - ('libwebp', '1.1.0'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('zlib', '1.2.11'), - ('LibTIFF', '4.1.0'), - ('freetype', '2.10.1'), - ('libyuv', '20201024'), - ('uglifyjs', '3.11.4'), - ('DBus', '1.12.20'), - ('XServer', '1.20.9'), -] - -components = [ - ('xpra-html5', '4.0.4', { - 'easyblock': 'CmdCp', - 'source_urls': ['http://xpra.org/src/'], - 'sources': ['%(name)s-%(version)s.tar.bz2'], - 'checksums': [('sha256', 'c294f80016a2cbf8878673cba7884ac2ffcaf5495141975f65eb4e93a08623db')], - 'cmds_map': [('.*', 'tar -xjf %(source)s')], - 'files_to_copy': ['%(name)s-%(version)s/*'], - }), -] - -prebuildopts = 'export CFLAGS="-Wno-error=unused-function" && ' - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_default_options = { - 'use_pip': True, - 'source_urls': [PYPI_SOURCE], - 'sanity_pip_check': True, - 'use_pip_for_deps': False, - 'download_dep_fail': True, -} - -local_common_opts = { - 'req_py_majver': '3', - 'req_py_minver': '0' -} - -exts_list = [ - ('pyinotify', '0.9.6', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '9c998a5d7606ca835065cdabc013ae6c66eb9ea76a00a1e3bc6e0cfe2b4f71f4')]), - ])), - ('dbus-python', '1.2.16', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '11238f1d86c995d8aed2e22f04a1e3779f0d70e587caffeab4857f3c662ed5a4')]), - ('modulename', 'dbus'), - ])), - ('pyxdg', '0.27', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '80bd93aae5ed82435f20462ea0208fb198d8eec262e831ee06ce9ddb6b91c5a5')]), - ('modulename', 'xdg'), - ])), - ('python-uinput', '0.11.2', dict(list(local_common_opts.items()) + [ - ('checksums', [('sha256', '99392b676c77b5795b86b7d75274db33fe754fd1e06fb3d58b167c797dc47f0c')]), - ('modulename', 'uinput'), - ])), - ('xpra', '4.0.4', dict(list(local_common_opts.items()) + [ - ('source_urls', ['http://xpra.org/src/']), - ('source_tmpl', '%(name)s-%(version)s.tar.bz2'), - ('checksums', [('sha256', 'a443282225d2498fe372ebc01bebf884fdecaf4d181a7053be420424a2b6dde7')]), - ('patches', ['xpra-4.0.4-use_Xorg_on_PATH_first.patch']), - ('use_pip', False), - ('installopts', '--with-tests --without-service'), - ])), -] - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages', 'lib64/python%(pyshortver)s/site-packages'], -} - -sanity_check_paths = { - 'files': ['bin/xpra'], - 'dirs': 
['lib/python%(pyshortver)s/site-packages'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/x/xpra/xpra-4.0.4-use_Xorg_on_PATH_first.patch b/Golden_Repo/x/xpra/xpra-4.0.4-use_Xorg_on_PATH_first.patch deleted file mode 100644 index dadb134920b0c02ef65a8ffc6f0f6a872d4a30d0..0000000000000000000000000000000000000000 --- a/Golden_Repo/x/xpra/xpra-4.0.4-use_Xorg_on_PATH_first.patch +++ /dev/null @@ -1,28 +0,0 @@ -diff -Naur xpra-4.0.4.orig/xpra/scripts/config.py xpra-4.0.4/xpra/scripts/config.py ---- xpra-4.0.4.orig/xpra/scripts/config.py 2020-09-27 20:11:39.000000000 +0200 -+++ xpra-4.0.4/xpra/scripts/config.py 2020-11-11 19:18:28.619408000 +0100 -@@ -60,6 +60,13 @@ - - def get_xorg_bin(): - # Detect Xorg Binary -+ -+ #look for it in $PATH: -+ for x in os.environ.get("PATH").split(os.pathsep): # pragma: no cover -+ xorg = os.path.join(x, "Xorg") -+ if os.path.isfile(xorg): -+ return xorg -+ - if is_arm() and is_Debian() and os.path.exists("/usr/bin/Xorg"): - #Raspbian breaks if we use a different binary.. - return "/usr/bin/Xorg" -@@ -72,11 +79,6 @@ - ): - if os.path.exists(p): - return p -- #look for it in $PATH: -- for x in os.environ.get("PATH").split(os.pathsep): # pragma: no cover -- xorg = os.path.join(x, "Xorg") -- if os.path.isfile(xorg): -- return xorg - return None - diff --git a/Golden_Repo/y/YAXT/YAXT-0.9.0-gpsmpi-2020.eb b/Golden_Repo/y/YAXT/YAXT-0.9.0-gpsmpi-2020.eb deleted file mode 100644 index ddc929c11c7cbf3e007557a99b8ff55b19341093..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/YAXT/YAXT-0.9.0-gpsmpi-2020.eb +++ /dev/null @@ -1,30 +0,0 @@ -# Warning: YAXT needs to be run from within an salloc, with a shared buildpath -# Example: -# salloc --partition=devel --nodes=1 -# eb --buildpath=$HOME/temp YAXT-0.9.0-gpsmpi-2019a.eb - -easyblock = 'ConfigureMake' - -name = 'YAXT' -version = '0.9.0' - -homepage = 'https://www.dkrz.de/redmine/projects/yaxt' -description = """Yet Another eXchange Tool""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'usempi': True} - -source_urls = ['https://www.dkrz.de/redmine/attachments/download/498/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['d3673e88c1cba3b77e0821393b94b5952d8ed7dc494305c8cf93e7ebec19483c'] - -preconfigopts = 'MPI_LAUNCH="$(which srun)"' - -sanity_check_paths = { - 'files': ["include/yaxt.h", "include/yaxt.mod", "lib/libyaxt.a", "lib/libyaxt.so"], - 'dirs': ["include/xt"], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/y/Yasm/Yasm-1.3.0-GCCcore-10.3.0.eb b/Golden_Repo/y/Yasm/Yasm-1.3.0-GCCcore-10.3.0.eb deleted file mode 100644 index 1bcc45de1fb8b084957ebd1d30e08af3b255a786..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/Yasm/Yasm-1.3.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -# # -# This file is an EasyBuild reciPY as per https://github.com/easybuilders -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of -# the policy: # https://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-90.html -# # - -easyblock = 'ConfigureMake' - -name = 'Yasm' -version = '1.3.0' - -homepage = 'https://www.tortall.net/projects/yasm/' -description = "Yasm: Complete rewrite of the NASM assembler with BSD license" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://www.tortall.net/projects/%(namelower)s/releases/'] 
-sources = [SOURCELOWER_TAR_GZ] -checksums = ['3dce6601b495f5b3d45b59f7d2492a340ee7e84b5beca17e48f862502bd5603f'] - -builddependencies = [ - ('binutils', '2.36.1'), -] - -sanity_check_paths = { - 'files': ['bin/%(namelower)s'], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/y/Yasm/Yasm-1.3.0-GCCcore-9.3.0.eb b/Golden_Repo/y/Yasm/Yasm-1.3.0-GCCcore-9.3.0.eb deleted file mode 100644 index 9c1ddbb2f87a23cf5a20ccaf2e144446adaa5941..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/Yasm/Yasm-1.3.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -# # -# This file is an EasyBuild reciPY as per https://github.com/easybuilders -# -# Copyright:: Copyright 2012-2014 Uni.Lu/LCSB, NTUA -# Authors:: Fotis Georgatos <fotis@cern.ch> -# License:: MIT/GPL -# $Id$ -# -# This work implements a part of the HPCBIOS project and is a component of -# the policy: # https://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-90.html -# # - -easyblock = 'ConfigureMake' - -name = 'Yasm' -version = '1.3.0' - -homepage = 'https://www.tortall.net/projects/yasm/' -description = "Yasm: Complete rewrite of the NASM assembler with BSD license" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://www.tortall.net/projects/%(namelower)s/releases/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['3dce6601b495f5b3d45b59f7d2492a340ee7e84b5beca17e48f862502bd5603f'] - -builddependencies = [ - ('binutils', '2.34'), -] - -sanity_check_paths = { - 'files': ['bin/%(namelower)s'], - 'dirs': [], -} - -moduleclass = 'lang' diff --git a/Golden_Repo/y/yaff/molmod-1.4.5_cython_0.29_no_future.patch b/Golden_Repo/y/yaff/molmod-1.4.5_cython_0.29_no_future.patch deleted file mode 100644 index 41037b729bfa5a20004d1a5d8a6c9a2ee1e96f7e..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/yaff/molmod-1.4.5_cython_0.29_no_future.patch +++ /dev/null @@ -1,48 +0,0 @@ -diff -rupN molmod-1.4.5_orig/molmod/examples/000_units/a_reaction.py molmod-1.4.5/molmod/examples/000_units/a_reaction.py ---- molmod-1.4.5_orig/molmod/examples/000_units/a_reaction.py 2019-09-11 20:34:14.000000000 +0200 -+++ molmod-1.4.5/molmod/examples/000_units/a_reaction.py 2020-10-22 13:07:59.082851000 +0200 -@@ -2,7 +2,6 @@ - - from __future__ import print_function - --from past.utils import old_div - from molmod import * - - energy_react = -157.31456213 -diff -rupN molmod-1.4.5_orig/molmod/ext.pyx molmod-1.4.5/molmod/ext.pyx ---- molmod-1.4.5_orig/molmod/ext.pyx 2019-09-11 20:34:14.000000000 +0200 -+++ molmod-1.4.5/molmod/ext.pyx 2020-10-22 12:36:34.394077865 +0200 -@@ -1,3 +1,5 @@ -+#!python -+#cython: language_level=3 - # -*- coding: utf-8 -*- - # MolMod is a collection of molecular modelling tools for python. 
- # Copyright (C) 2007 - 2019 Toon Verstraelen <Toon.Verstraelen@UGent.be>, Center -diff -rupN molmod-1.4.5_orig/molmod/utils.py molmod-1.4.5/molmod/utils.py ---- molmod-1.4.5_orig/molmod/utils.py 2019-09-11 20:34:14.000000000 +0200 -+++ molmod-1.4.5/molmod/utils.py 2020-10-22 12:53:05.929120006 +0200 -@@ -25,7 +25,7 @@ - - from builtins import range - import numpy as np --from future.utils import with_metaclass -+from six import with_metaclass - - - __all__ = ["cached", "ReadOnlyAttribute", "ReadOnly", "compute_rmsd"] -diff -rupN molmod-1.4.5_orig/setup.py molmod-1.4.5/setup.py ---- molmod-1.4.5_orig/setup.py 2019-09-11 20:34:14.000000000 +0200 -+++ molmod-1.4.5/setup.py 2020-10-22 13:19:36.920606000 +0200 -@@ -91,10 +91,10 @@ setup( - "molmod/graphs.pxd", "molmod/similarity.h", "molmod/similarity.pxd", - "molmod/molecules.h", "molmod/molecules.pxd", "molmod/unit_cells.h", - "molmod/unit_cells.pxd"], -- include_dirs=[np.get_include()], -+ include_dirs=[np.get_include(), "molmod"], - )], - setup_requires=['numpy>=1.0', 'cython>=0.24.1'], -- install_requires=['numpy>=1.0', 'cython>=0.24.1', 'future'], -+ install_requires=['numpy>=1.0', 'cython>=0.24.1', 'six'], - classifiers=[ - 'Environment :: Console', - 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', diff --git a/Golden_Repo/y/yaff/molmod-1.4.5_deprecated_time_method.patch b/Golden_Repo/y/yaff/molmod-1.4.5_deprecated_time_method.patch deleted file mode 100644 index 33594754c112b9a5f3d0cc3358a0b45cdadc1d35..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/yaff/molmod-1.4.5_deprecated_time_method.patch +++ /dev/null @@ -1,42 +0,0 @@ -Molmod is not compatible with Python 3.8 due to its deprecated -use of time.clock() (which is no longer supported as of 3.8) - -Alan O'Cais, 20200324 -diff -Nru molmod-1.4.5.orig/molmod/log.py molmod-1.4.5/molmod/log.py ---- molmod-1.4.5.orig/molmod/log.py 2019-09-11 20:34:14.000000000 +0200 -+++ molmod-1.4.5/molmod/log.py 2020-03-20 10:05:41.874916780 +0100 -@@ -388,11 +388,11 @@ - - def start(self): - assert self._start is None -- self._start = time.clock() -+ self._start = time.process_time() - - def stop(self): - assert self._start is not None -- self.cpu += time.clock() - self._start -+ self.cpu += time.process_time() - self._start - self._start = None - - -diff -Nru molmod-1.4.5.orig/molmod/minimizer.py molmod-1.4.5/molmod/minimizer.py ---- molmod-1.4.5.orig/molmod/minimizer.py 2019-09-11 20:34:14.000000000 +0200 -+++ molmod-1.4.5/molmod/minimizer.py 2020-03-20 10:06:39.712207523 +0100 -@@ -1414,7 +1414,7 @@ - except ConstraintError: - self._screen("CONSTRAINT PROJECT FAILED", newline=True) - return False -- self.last_end = time.clock() -+ self.last_end = time.process_time() - - def propagate(self): - # compute the new direction -@@ -1474,7 +1474,7 @@ - else: - converged = False - # timing -- end = time.clock() -+ end = time.process_time() - self._screen("%5.2f" % (end - self.last_end), newline=True) - self.last_end = end - # check convergence, part 2 diff --git a/Golden_Repo/y/yaff/yaff-1.6.0-gpsmpi-2020-Python-3.8.5.eb b/Golden_Repo/y/yaff/yaff-1.6.0-gpsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index a2ca1257f3fa81f6cae6a9aa618fa951f8ccab01..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/yaff/yaff-1.6.0-gpsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,53 +0,0 @@ -# Updated from previous config -# Author: Pavel Grochal (INUITS) -# License: GPLv2 -# -# Building this in interactive Slurm session will result in freeze during either -# runtest 
phase or sanity_check_commands phase (python -c 'import yaff') -# -# If you submit this as non-interactive Slurm job, it will build just fine. -# Possibly root cause: https://github.com/h5py/h5py/issues/917 -# -easyblock = 'PythonBundle' - -name = 'yaff' -version = '1.6.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://molmod.github.io/yaff/' -description = "Yaff stands for 'Yet another force field'. It is a pythonic force-field code." - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -dependencies = [ - ('Python', '3.8.5'), - ('h5py', '2.10.0', '-Python-%(pyver)s'), -] - -use_pip = True -sanity_pip_check = True - - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('molmod', '1.4.5', { - 'source_tmpl': SOURCE_TAR_GZ, - 'source_urls': ['https://github.com/%(name)s/%(name)s/releases/download/%(version)s'], - 'patches': ['%(name)s-%(version_major_minor)s.5_deprecated_time_method.patch'] + - ['molmod-1.4.5_cython_0.29_no_future.patch'], - 'runtest': "export MATPLOTLIBRC=$PWD; echo 'backend: agg' > $MATPLOTLIBRC/matplotlibrc; " + - "python setup.py build_ext -i; nosetests -v", - }), - (name, version, { - 'source_tmpl': SOURCE_TAR_GZ, - 'source_urls': ['https://github.com/molmod/%(name)s/releases/download/%(version)s'], - 'checksums': ['a266ab032778e37bb2e93152aefb67f396827aa728151651403984429c74ceaa'], - 'runtest': "export MATPLOTLIBRC=$PWD; echo 'backend: agg' > $MATPLOTLIBRC/matplotlibrc; " + - "python setup.py build_ext -i; nosetests -v", - }), -] - -moduleclass = 'chem' diff --git a/Golden_Repo/y/yaff/yaff-1.6.0-ipsmpi-2020-Python-3.8.5.eb b/Golden_Repo/y/yaff/yaff-1.6.0-ipsmpi-2020-Python-3.8.5.eb deleted file mode 100644 index 552f394e46bf1b4ebd37df3196197569256b035c..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/yaff/yaff-1.6.0-ipsmpi-2020-Python-3.8.5.eb +++ /dev/null @@ -1,55 +0,0 @@ -# Updated from previous config -# Author: Pavel Grochal (INUITS) -# License: GPLv2 -# -# Building this in interactive Slurm session will result in freeze during either -# runtest phase or sanity_check_commands phase (python -c 'import yaff') -# -# If you submit this as non-interactive Slurm job, it will build just fine. -# Possibly root cause: https://github.com/h5py/h5py/issues/917 -# -easyblock = 'PythonBundle' - -name = 'yaff' -version = '1.6.0' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://molmod.github.io/yaff/' -description = "Yaff stands for 'Yet another force field'. It is a pythonic force-field code." 
- -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -dependencies = [ - ('Python', '3.8.5'), - ('h5py', '2.10.0', '-Python-%(pyver)s'), -] - -use_pip = True -sanity_pip_check = True - -# required because we're building a Python package using Intel compilers on top of Python built with GCC -check_ldshared = True - -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('molmod', '1.4.5', { - 'source_tmpl': SOURCE_TAR_GZ, - 'source_urls': ['https://github.com/%(name)s/%(name)s/releases/download/%(version)s'], - 'patches': ['%(name)s-%(version_major_minor)s.5_deprecated_time_method.patch'] + - ['molmod-1.4.5_cython_0.29_no_future.patch'], - 'runtest': "export MATPLOTLIBRC=$PWD; echo 'backend: agg' > $MATPLOTLIBRC/matplotlibrc; " + - "python setup.py build_ext -i; nosetests -v", - }), - (name, version, { - 'source_tmpl': SOURCE_TAR_GZ, - 'source_urls': ['https://github.com/molmod/%(name)s/releases/download/%(version)s'], - 'checksums': ['a266ab032778e37bb2e93152aefb67f396827aa728151651403984429c74ceaa'], - 'runtest': "export MATPLOTLIBRC=$PWD; echo 'backend: agg' > $MATPLOTLIBRC/matplotlibrc; " + - "python setup.py build_ext -i; nosetests -v", - }), -] - -moduleclass = 'chem' diff --git a/Golden_Repo/y/yuicompressor/yuicompressor-2.4.8-GCCcore-10.3.0-Python-3.8.5.eb b/Golden_Repo/y/yuicompressor/yuicompressor-2.4.8-GCCcore-10.3.0-Python-3.8.5.eb deleted file mode 100644 index d5780de3ab4db646a3813f5f4829f3b844869bd4..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/yuicompressor/yuicompressor-2.4.8-GCCcore-10.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'yuicompressor' -version = '2.4.8' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/sprat/yuicompressor' -description = """YUI Compressor is a JavaScript and CSS minifier written in Java.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/sprat/yuicompressor/archive/'] -sources = ['%(version)s.tar.gz'] -checksums = ['0054abb77cc151147597aeaa5b47b6843925d3293e2e44d5b36e68ee54a1154f'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Java', '15', '', SYSTEM), -] - -use_pip = True -sanity_pip_check = True -download_dep_fail = True - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages', 'lib64/python%(pyshortver)s/site-packages'], -} - -sanity_check_paths = { - 'files': ['bin/%(name)s'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/%(name)s'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/y/yuicompressor/yuicompressor-2.4.8-GCCcore-9.3.0-Python-3.8.5.eb b/Golden_Repo/y/yuicompressor/yuicompressor-2.4.8-GCCcore-9.3.0-Python-3.8.5.eb deleted file mode 100644 index 585047becdcc0b838c0c31fa9351e8b38c505587..0000000000000000000000000000000000000000 --- a/Golden_Repo/y/yuicompressor/yuicompressor-2.4.8-GCCcore-9.3.0-Python-3.8.5.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'PythonPackage' - -name = 'yuicompressor' -version = '2.4.8' -versionsuffix = '-Python-%(pyver)s' - -homepage = 'https://github.com/sprat/yuicompressor' -description = """YUI Compressor is a JavaScript and CSS minifier written in Java.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/sprat/yuicompressor/archive/'] -sources = ['%(version)s.tar.gz'] 
-checksums = ['0054abb77cc151147597aeaa5b47b6843925d3293e2e44d5b36e68ee54a1154f'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('Python', '3.8.5'), - ('Java', '1.8', '', SYSTEM), -] - -use_pip = True -sanity_pip_check = True -download_dep_fail = True - -modextrapaths = { - 'PYTHONPATH': ['lib/python%(pyshortver)s/site-packages', 'lib64/python%(pyshortver)s/site-packages'], -} - -sanity_check_paths = { - 'files': ['bin/%(name)s'], - 'dirs': ['lib/python%(pyshortver)s/site-packages/%(name)s'], -} - -moduleclass = 'vis' diff --git a/Golden_Repo/z/Z3/Z3-4.8.9-GCCcore-10.3.0.eb b/Golden_Repo/z/Z3/Z3-4.8.9-GCCcore-10.3.0.eb deleted file mode 100644 index e4302d1204499086ba2680e8c7b8bc8f114c3156..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/Z3/Z3-4.8.9-GCCcore-10.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Z3' -version = '4.8.9' - -homepage = 'https://github.com/Z3Prover/z3' -description = """ - Z3 is a theorem prover from Microsoft Research. -""" -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/Z3Prover/z3/archive/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['c9fd04b9b33be74fffaac3ec2bc2c320d1a4cc32e395203c55126b12a14ff3f4'] - -builddependencies = [ - ('CMake', '3.18.0', '', SYSTEM), - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.36.1'), -] - -dependencies = [ - ('GMP', '6.2.0'), -] - -configopts = '-DZ3_USE_LIB_GMP=ON -DZ3_LINK_TIME_OPTIMIZATION=ON ' - -sanity_check_paths = { - 'files': ['bin/z3', 'include/z3_api.h', 'lib/libz3.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/z/Z3/Z3-4.8.9-GCCcore-9.3.0.eb b/Golden_Repo/z/Z3/Z3-4.8.9-GCCcore-9.3.0.eb deleted file mode 100644 index 4221f9fd2d14b8812ce7387b32de5940db6c4d48..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/Z3/Z3-4.8.9-GCCcore-9.3.0.eb +++ /dev/null @@ -1,35 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Z3' -version = '4.8.9' - -homepage = 'https://github.com/Z3Prover/z3' -description = """ - Z3 is a theorem prover from Microsoft Research. -""" -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/Z3Prover/z3/archive/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['c9fd04b9b33be74fffaac3ec2bc2c320d1a4cc32e395203c55126b12a14ff3f4'] - -builddependencies = [ - ('CMake', '3.18.0'), - # use same binutils version that was used when building GCCcore toolchain - ('binutils', '2.34'), -] - -dependencies = [ - ('GMP', '6.2.0'), -] - -configopts = '-DZ3_USE_LIB_GMP=ON -DZ3_LINK_TIME_OPTIMIZATION=ON ' - -sanity_check_paths = { - 'files': ['bin/z3', 'include/z3_api.h', 'lib/libz3.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'tools' diff --git a/Golden_Repo/z/ZeroMQ/ZeroMQ-4.3.3-GCCcore-10.3.0.eb b/Golden_Repo/z/ZeroMQ/ZeroMQ-4.3.3-GCCcore-10.3.0.eb deleted file mode 100644 index e77f50862fc61b444749eb422d4823930f5cd411..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/ZeroMQ/ZeroMQ-4.3.3-GCCcore-10.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ZeroMQ' -version = '4.3.3' - -homepage = 'https://www.zeromq.org/' -description = """ZeroMQ looks like an embeddable networking library but acts like a concurrency framework. - It gives you sockets that carry atomic messages across various transports like in-process, - inter-process, TCP, and multicast. 
You can connect sockets N-to-N with patterns like fanout, - pub-sub, task distribution, and request-reply. It's fast enough to be the fabric for clustered - products. Its asynchronous I/O model gives you scalable multicore applications, built as asynchronous - message-processing tasks. It has a score of language APIs and runs on most operating systems.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -source_urls = ['https://github.com/zeromq/libzmq/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['9d9285db37ae942ed0780c016da87060497877af45094ff9e1a1ca736e3875a2'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('pkg-config', '0.29.2'), -] - -configopts = '--with-pic ' - -sanity_check_paths = { - 'files': ['lib/libzmq.%s' % SHLIB_EXT, 'lib/libzmq.a'], - 'dirs': ['include', 'lib'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/z/ZeroMQ/ZeroMQ-4.3.3-GCCcore-9.3.0.eb b/Golden_Repo/z/ZeroMQ/ZeroMQ-4.3.3-GCCcore-9.3.0.eb deleted file mode 100644 index c52a6ac6591519754ef5c8796fe8a5cac3a65ee7..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/ZeroMQ/ZeroMQ-4.3.3-GCCcore-9.3.0.eb +++ /dev/null @@ -1,34 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'ZeroMQ' -version = '4.3.3' - -homepage = 'https://www.zeromq.org/' -description = """ZeroMQ looks like an embeddable networking library but acts like a concurrency framework. - It gives you sockets that carry atomic messages across various transports like in-process, - inter-process, TCP, and multicast. You can connect sockets N-to-N with patterns like fanout, - pub-sub, task distribution, and request-reply. It's fast enough to be the fabric for clustered - products. Its asynchronous I/O model gives you scalable multicore applications, built as asynchronous - message-processing tasks. It has a score of language APIs and runs on most operating systems.""" - -site_contacts = 'j.goebbert@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://github.com/zeromq/libzmq/releases/download/v%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['9d9285db37ae942ed0780c016da87060497877af45094ff9e1a1ca736e3875a2'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -configopts = '--with-pic ' - -sanity_check_paths = { - 'files': ['lib/libzmq.%s' % SHLIB_EXT, 'lib/libzmq.a'], - 'dirs': ['include', 'lib'], -} - -moduleclass = 'devel' diff --git a/Golden_Repo/z/Zip/Zip-3.0-GCCcore-10.3.0.eb b/Golden_Repo/z/Zip/Zip-3.0-GCCcore-10.3.0.eb deleted file mode 100644 index 7c2834d239fac9dbc92a6d0845d322da41418dd8..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/Zip/Zip-3.0-GCCcore-10.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Zip' -version = '3.0' - -homepage = 'http://www.info-zip.org/Zip.html' -description = """Zip is a compression and file packaging/archive utility. 
-Although highly compatible both with PKWARE's PKZIP and PKUNZIP -utilities for MS-DOS and with Info-ZIP's own UnZip, our primary objectives -have been portability and other-than-MSDOS functionality""" - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://download.sourceforge.net/infozip'] -sources = ['%(namelower)s%(version_major)s%(version_minor)s.tar.gz'] -checksums = ['f0e8bb1f9b7eb0b01285495a2699df3a4b766784c1765a8f1aeedf63c0806369'] - -builddependencies = [ - ('binutils', '2.36.1'), -] -dependencies = [ - ('bzip2', '1.0.8'), -] - -skipsteps = ['configure'] - -buildopts = '-f unix/Makefile CC="$CC" IZ_OUR_BZIP2_DIR=$EBROOTBZIP2 ' -buildopts += 'CFLAGS="$CFLAGS -I. -DUNIX -DBZIP2_SUPPORT -DUNICODE_SUPPORT -DLARGE_FILE_SUPPORT" ' -buildopts += 'generic_gcc' - -installopts = '-f unix/Makefile prefix=%(installdir)s ' - -sanity_check_paths = { - 'files': ['bin/zip', 'bin/zipcloak', 'bin/zipnote', 'bin/zipsplit'], - 'dirs': ['man/man1'] -} - -sanity_check_commands = ["zip --version"] - -moduleclass = 'tools' diff --git a/Golden_Repo/z/Zip/Zip-3.0-GCCcore-9.3.0.eb b/Golden_Repo/z/Zip/Zip-3.0-GCCcore-9.3.0.eb deleted file mode 100644 index 5c07caec933b154dd6370049fa026609fcaa1633..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/Zip/Zip-3.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,42 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'Zip' -version = '3.0' - -homepage = 'http://www.info-zip.org/Zip.html' -description = """Zip is a compression and file packaging/archive utility. -Although highly compatible both with PKWARE's PKZIP and PKUNZIP -utilities for MS-DOS and with Info-ZIP's own UnZip, our primary objectives -have been portability and other-than-MSDOS functionality""" - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -site_contacts = 'a.strube@fz-juelich.de' - -source_urls = ['https://download.sourceforge.net/infozip'] -sources = ['%(namelower)s%(version_major)s%(version_minor)s.tar.gz'] -checksums = ['f0e8bb1f9b7eb0b01285495a2699df3a4b766784c1765a8f1aeedf63c0806369'] - -builddependencies = [ - ('binutils', '2.34'), -] -dependencies = [ - ('bzip2', '1.0.8'), -] - -skipsteps = ['configure'] - -buildopts = '-f unix/Makefile CC="$CC" IZ_OUR_BZIP2_DIR=$EBROOTBZIP2 ' -buildopts += 'CFLAGS="$CFLAGS -I. -DUNIX -DBZIP2_SUPPORT -DUNICODE_SUPPORT -DLARGE_FILE_SUPPORT" ' -buildopts += 'generic_gcc' - -installopts = '-f unix/Makefile prefix=%(installdir)s ' - -sanity_check_paths = { - 'files': ['bin/zip', 'bin/zipcloak', 'bin/zipnote', 'bin/zipsplit'], - 'dirs': ['man/man1'] -} - -sanity_check_commands = ["zip --version"] - -moduleclass = 'tools' diff --git a/Golden_Repo/z/zlib/zlib-1.2.11-GCCcore-10.3.0.eb b/Golden_Repo/z/zlib/zlib-1.2.11-GCCcore-10.3.0.eb deleted file mode 100644 index d733a6f2b3767c2372a65aae6db4278aadbe9db8..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/zlib/zlib-1.2.11-GCCcore-10.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'zlib' -version = '1.2.11' - -homepage = 'https://www.zlib.net/' -description = """zlib is designed to be a free, general-purpose, legally unencumbered -- that is, - not covered by any patents -- lossless data-compression library for use on virtually any - computer hardware and operating system. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = ['https://zlib.net/fossils'] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1'] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.36.1', '', SYSTEM)] - -sanity_check_paths = { - 'files': ['include/zconf.h', 'include/zlib.h', 'lib/libz.a', 'lib/libz.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/z/zlib/zlib-1.2.11-GCCcore-9.3.0.eb b/Golden_Repo/z/zlib/zlib-1.2.11-GCCcore-9.3.0.eb deleted file mode 100644 index b203a1c91d2858d3c034d0a224eac241ce7b64e1..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/zlib/zlib-1.2.11-GCCcore-9.3.0.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'zlib' -version = '1.2.11' - -homepage = 'http://www.zlib.net/' -description = """zlib is designed to be a free, general-purpose, legally unencumbered -- that is, - not covered by any patents -- lossless data-compression library for use on virtually any - computer hardware and operating system. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} -toolchainopts = {'pic': True} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [('http://sourceforge.net/projects/libpng/files/zlib/%(version)s', 'download')] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.34', '', SYSTEM)] - -sanity_check_paths = { - 'files': ['include/zconf.h', 'include/zlib.h', 'lib/libz.a', 'lib/libz.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/z/zlib/zlib-1.2.11.eb b/Golden_Repo/z/zlib/zlib-1.2.11.eb deleted file mode 100644 index ae1f3363a794a9901cb8db80128dc94928b85153..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/zlib/zlib-1.2.11.eb +++ /dev/null @@ -1,28 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'zlib' -version = '1.2.11' - -homepage = 'http://www.zlib.net/' -description = """zlib is designed to be a free, general-purpose, legally unencumbered -- that is, - not covered by any patents -- lossless data-compression library for use on virtually any - computer hardware and operating system. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [('http://sourceforge.net/projects/libpng/files/zlib/%(version)s', 'download')] - -# need to take care of $CFLAGS ourselves with system toolchain -# we need to add -fPIC, but should also include -O* option to avoid compiling with -O0 (default for GCC) -buildopts = 'CFLAGS="-O2 -fPIC"' - -sanity_check_paths = { - 'files': ['include/zconf.h', 'include/zlib.h', 'lib/libz.a', 'lib/libz.%s' % SHLIB_EXT], - 'dirs': [], -} - -moduleclass = 'lib' diff --git a/Golden_Repo/z/zsh/zsh-5.8-GCCcore-10.3.0.eb b/Golden_Repo/z/zsh/zsh-5.8-GCCcore-10.3.0.eb deleted file mode 100644 index ab9a21b766b26336e875ce925ff499bb6019b4e2..0000000000000000000000000000000000000000 --- a/Golden_Repo/z/zsh/zsh-5.8-GCCcore-10.3.0.eb +++ /dev/null @@ -1,30 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'zsh' -version = '5.8' - -homepage = 'http://www.zsh.org/' -description = """ -Zsh is a shell designed for interactive use, although it is also a powerful scripting language. 
-"""
-
-site_contacts = 'a.kreuzer@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
-
-sources = [SOURCELOWER_TAR_XZ]
-source_urls = ['http://prdownloads.sourceforge.net/%(namelower)s']
-
-builddependencies = [
-    ('binutils', '2.36.1'),
-]
-
-dependencies = [
-    ('ncurses', '6.2'),
-]
-
-modextrapaths = {
-    'FPATH': 'share/zsh/%(version)s/functions'
-}
-
-moduleclass = 'tools'
diff --git a/Golden_Repo/z/zsh/zsh-5.8-GCCcore-9.3.0.eb b/Golden_Repo/z/zsh/zsh-5.8-GCCcore-9.3.0.eb
deleted file mode 100644
index 98b42a95ba56154f156dbd0a5be4d01a10bee9c1..0000000000000000000000000000000000000000
--- a/Golden_Repo/z/zsh/zsh-5.8-GCCcore-9.3.0.eb
+++ /dev/null
@@ -1,30 +0,0 @@
-easyblock = 'ConfigureMake'
-
-name = 'zsh'
-version = '5.8'
-
-homepage = 'http://www.zsh.org/'
-description = """
-Zsh is a shell designed for interactive use, although it is also a powerful scripting language.
-"""
-
-site_contacts = 'a.kreuzer@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
-
-sources = [SOURCELOWER_TAR_XZ]
-source_urls = ['http://prdownloads.sourceforge.net/%(namelower)s']
-
-builddependencies = [
-    ('binutils', '2.34'),
-]
-
-dependencies = [
-    ('ncurses', '6.2'),
-]
-
-modextrapaths = {
-    'FPATH': 'share/zsh/%(version)s/functions'
-}
-
-moduleclass = 'tools'
diff --git a/Golden_Repo/z/zstd/zstd-1.4.9-GCCcore-10.3.0.eb b/Golden_Repo/z/zstd/zstd-1.4.9-GCCcore-10.3.0.eb
deleted file mode 100644
index a1a32967c58fe3006e1a4e00a3b4eab61620a5a1..0000000000000000000000000000000000000000
--- a/Golden_Repo/z/zstd/zstd-1.4.9-GCCcore-10.3.0.eb
+++ /dev/null
@@ -1,43 +0,0 @@
-easyblock = 'ConfigureMake'
-
-name = 'zstd'
-version = '1.4.9'
-
-homepage = 'https://facebook.github.io/zstd'
-description = """Zstandard is a real-time compression algorithm, providing high compression ratios.
- It offers a very wide range of compression/speed trade-off, while being backed by a very fast decoder.
- It also offers a special mode for small data, called dictionary compression, and can create dictionaries
- from any sample set."""
-
-site_contacts = 'sc@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
-
-github_account = 'facebook'
-source_urls = [GITHUB_SOURCE]
-sources = ['v%(version)s.tar.gz']
-checksums = ['acf714d98e3db7b876e5b540cbf6dee298f60eb3c0723104f6d3f065cd60d6a8']
-
-builddependencies = [
-    ('binutils', '2.36.1'),
-]
-
-dependencies = [
-    ('zlib', '1.2.11'),
-    ('gzip', '1.10'),
-    ('XZ', '5.2.5'),
-    ('lz4', '3.1.0', '-Python-3.8.5'),
-]
-
-skipsteps = ['configure']
-
-runtest = 'check'
-
-installopts = "PREFIX=%(installdir)s"
-
-sanity_check_paths = {
-    'files': ["bin/zstd", "lib/libzstd.%s" % SHLIB_EXT, "include/zstd.h"],
-    'dirs': ["lib/pkgconfig"]
-}
-
-moduleclass = 'lib'
diff --git a/Overlays/hdfml_overlay/e/Embree/Embree-3.8.0-GCC-9.3.0.eb b/Overlays/hdfml_overlay/e/Embree/Embree-3.8.0-GCC-9.3.0.eb
deleted file mode 100644
index 0daf9d3448bf03489634c55ca244e193be36fa98..0000000000000000000000000000000000000000
--- a/Overlays/hdfml_overlay/e/Embree/Embree-3.8.0-GCC-9.3.0.eb
+++ /dev/null
@@ -1,58 +0,0 @@
-easyblock = 'CMakeMake'
-
-name = 'Embree'
-version = '3.8.0'
-
-homepage = 'http://www.ospray.org/'
-description = """
-Embree is a collection of high-performance ray tracing kernels, developed at Intel. The target users of Embree are
-graphics application engineers that want to improve the performance of their application by leveraging the optimized ray
-tracing kernels of Embree. 
The kernels are optimized for photo-realistic rendering on the latest Intel processors with -support for SSE, AVX, AVX2, and AVX512. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'optarch': True, 'pic': True} - -source_urls = ['https://github.com/embree/embree/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('ispc', '1.12.0', '', SYSTEM), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('ImageMagick', '7.0.10-25'), - ('OpenEXR', '2.5.2'), - ('tbb', '2020.3'), -] - -separate_build_dir = True - -configopts = '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DEMBREE_ISPC_SUPPORT=ON ' -configopts += '-DEMBREE_TASKING_SYSTEM=TBB ' -configopts += '-DEMBREE_TBB_ROOT=$EBROOTTBB ' -# Select highest supported ISA (SSE2, SSE4.2, AVX, AVX2, AVX512KNL, AVX512SKX, or NONE) -configopts += '-DEMBREE_MAX_ISA=AVX512SKX ' -configopts += '-DEMBREE_TUTORIALS=OFF ' - -sanity_check_paths = { - 'dirs': ['include/embree3'], - 'files': ['lib64/libembree3.so'] -} - -modextrapaths = { - 'CMAKE_MODULE_PATH': 'lib64/cmake/embree-%(version)s/' -} - -moduleclass = 'vis' diff --git a/Overlays/hdfml_overlay/h/HDF/HDF-4.2.15-GCC-9.3.0.eb b/Overlays/hdfml_overlay/h/HDF/HDF-4.2.15-GCC-9.3.0.eb deleted file mode 100644 index 6178b0c12adac5ef08a67a76dd1afc8cb3e585d7..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/h/HDF/HDF-4.2.15-GCC-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HDF' -version = '4.2.15' - -homepage = 'http://www.hdfgroup.org/products/hdf4/' -description = """HDF (also known as HDF4) is a library and multi-object file format for storing - and managing data between machines. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('Java', '15', '', SYSTEM), -] - -dependencies = [ - ('Szip', '2.1.1'), - ('zlib', '1.2.11'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), -] - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.split('-')[0]] - -configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java ' -configopts += '--includedir=%(installdir)s/include/%(namelower)s ' - -sanity_check_paths = { - 'files': ['lib/libdf.a', 'lib/libhdf4.settings', 'lib/libmfhdf.a'], - 'dirs': ['bin', 'include/hdf'], -} - -moduleclass = 'data' diff --git a/Overlays/hdfml_overlay/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/hdfml_overlay/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index b3d0a06f6ac30a4a58270b00cf6ea4150dc7eec3..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HDF' -version = '4.2.15' - -homepage = 'http://www.hdfgroup.org/products/hdf4/' -description = """HDF (also known as HDF4) is a library and multi-object file format for storing - and managing data between machines. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('Java', '15', '', SYSTEM), -] - -dependencies = [ - ('Szip', '2.1.1'), - ('zlib', '1.2.11'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), -] - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.split('-')[0]] - -configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java ' -configopts += '--includedir=%(installdir)s/include/%(namelower)s ' - -sanity_check_paths = { - 'files': ['lib/libdf.a', 'lib/libhdf4.settings', 'lib/libmfhdf.a'], - 'dirs': ['bin', 'include/hdf'], -} - -moduleclass = 'data' diff --git a/Overlays/hdfml_overlay/m/M4/M4-1.4.18-GCCcore-9.3.0.eb b/Overlays/hdfml_overlay/m/M4/M4-1.4.18-GCCcore-9.3.0.eb deleted file mode 100644 index 32af8ba7451cb5c0b4a7fbbd3d7a6898e064d088..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/m/M4/M4-1.4.18-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'M4' -version = '1.4.18' - -homepage = 'http://www.gnu.org/software/m4/m4.html' -description = """GNU M4 is an implementation of the traditional Unix macro processor. It is mostly SVR4 compatible - although it has some extensions (for example, handling more than 9 positional parameters to macros). - GNU M4 also has built-in functions for including files, running shell commands, doing arithmetic, etc. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.34', '', SYSTEM)] - -configopts = "--enable-cxx" - -sanity_check_paths = { - 'files': ["bin/m4"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Overlays/hdfml_overlay/m/M4/M4-1.4.18.eb b/Overlays/hdfml_overlay/m/M4/M4-1.4.18.eb deleted file mode 100644 index d686b2eb04b511fde3cecd0ea74d3d3cb012f7d5..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/m/M4/M4-1.4.18.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'M4' -version = '1.4.18' - -homepage = 'http://www.gnu.org/software/m4/m4.html' -description = """GNU M4 is an implementation of the traditional Unix macro processor. - It is mostly SVR4 compatible although it has some extensions - (for example, handling more than 9 positional parameters to macros). - GNU M4 also has built-in functions for including files, running shell commands, doing arithmetic, etc. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -configopts = "--enable-cxx" - -sanity_check_paths = { - 'files': ["bin/m4"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb b/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb deleted file mode 100644 index ff32059cce7c27258b4952e4fe52fc7e0ccffb70..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.8.1', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -# No IME in HDFML -# configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 767e38682a9f630328659620001b7647c13b5429..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix 
- ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.8.1', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -# No IME in HDFML -# configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-GCC-10.3.0.eb b/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-GCC-10.3.0.eb deleted file mode 100644 index 096cbb25aec075396cf475e3a4419d1a38c07c7a..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-GCC-10.3.0.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.1' - -homepage = 'https://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda'] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.4.1'), - ('UCX', '1.10.1', '', SYSTEM), - ('CUDA', '11.3', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -# No IME in HDFML -# configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in 
local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", - "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-NVHPC-21.5-GCC-10.3.0.eb b/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-NVHPC-21.5-GCC-10.3.0.eb deleted file mode 100644 index 9d9f437bf1595d832284b89904a3040eb6c7eabd..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-NVHPC-21.5-GCC-10.3.0.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.1' - -homepage = 'https://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '21.5-GCC-10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda'] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.4.1'), - ('UCX', '1.10.1', '', SYSTEM), - ('CUDA', '11.3', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -# No IME in HDFML -# configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", - "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 78ae716b43bb195bc15047c80e6ab41b1c432231..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/o/OpenMPI/OpenMPI-4.1.1-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,64 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.1' - -homepage = 'https://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_BZ2] -checksums = ['e24f7a778bd11a71ad0c14587a7f5b00e68a71aa5623e2157bafee3d44c07cda'] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 
'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.4.1'), - ('UCX', '1.10.1', '', SYSTEM), - ('CUDA', '11.3', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -# No IME in HDFML -# configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", - "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb b/Overlays/hdfml_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb deleted file mode 100644 index 374e4e52bb3e11daecccf8504d074bc23f673c41..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb +++ /dev/null @@ -1,98 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyTorch-Geometric' -version = '1.6.3' -local_pytorch_ver = '1.7.0' -versionsuffix = '-Python-%%(pyver)s-PyTorch-%s' % local_pytorch_ver - -homepage = 'https://github.com/rusty1s/pytorch_geometric' -description = "PyTorch Geometric (PyG) is a geometric deep learning extension library for PyTorch." 
- -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -local_pysuff = '-Python-%(pyver)s' -dependencies = [ - ('Python', '3.8.5'), - ('PyTorch', local_pytorch_ver, local_pysuff), - ('numba', '0.51.1', local_pysuff), - ('h5py', '2.10.0', '-serial%s' % local_pysuff), - ('scikit', '2020', local_pysuff), - ('torchvision', '0.8.2', local_pysuff), - ('trimesh', '3.8.11', local_pysuff), - ('METIS', '5.1.0', '-IDX64'), -] - -use_pip = True - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_download_dep_fail = True -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('gdist', '1.0.3', { - 'source_urls': ['https://pypi.python.org/packages/source/g/gdist'], - 'modulename': 'gdist', - }), - ('rdflib', '5.0.0', { - 'checksums': ['78149dd49d385efec3b3adfbd61c87afaf1281c30d3fcaf1b323b34f603fb155'], - 'modulename': 'rdflib', - }), - ('googledrivedownloader', '0.4', { - 'checksums': ['4b34c1337b2ff3bf2bd7581818efbdcaea7d50ffd484ccf80809688f5ca0e204'], - 'modulename': 'google_drive_downloader', - }), - ('plyfile', '0.7.2', { - 'checksums': ['59a25845d00a51098e6c9147c3c96ce89ad97395e256a4fabb4aed7cf7db5541'], - }), - ('torch_scatter', '2.0.5', { - 'patches': ['torch_scatter-2.0.5-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '2.0.5.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_scatter/archive/'], - }), - ('torch_sparse', '0.6.8', { - 'patches': ['torch_sparse-0.6.8-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'preinstallopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'source_tmpl': '0.6.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_sparse/archive/'], - }), - ('torch_cluster', '1.5.8', { - 'patches': ['torch_cluster-1.5.8-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.5.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_cluster/archive/'], - }), - ('torch_spline_conv', '1.2.0', { - 'patches': ['torch_spline_conv-1.2.0-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.2.0.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_spline_conv/archive'], - }), - ('ase', '3.21.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/ase'], - 'modulename': 'ase', - }), - ('python-louvain', '0.15', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-louvain'], - 'checksums': ['2a856edfbe29952a60a5538a84bb78cca18f6884a88b9325e85a11c8dd4917eb'], - 'modulename': 'community', - }), - ('tqdm', '4.56.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tqdm'], - 'modulename': 'tqdm', - }), - ('torch_geometric', version, { - 'checksums': ['347f693bebcc8a621eda4867dafab91c04db5f596d7ed7ecb89b242f8ab5c6a1'], - }), -] - -sanity_pip_check = True - -moduleclass = 'devel' diff --git a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch b/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch deleted file mode 100644 index 25bbf0ec24a03461821a0b38e4ef043efcb052d0..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:10:11.609352000 +0100 -+++ setup.py 2021-01-20 10:10:37.525550350 +0100 -@@ -39,7 +39,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = 
os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch b/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch deleted file mode 100644 index 94ae43ed4ef1a4d0788e0d08a0564b8978868506..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 09:53:22.085271000 +0100 -+++ setup.py 2021-01-20 09:53:54.835241801 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch b/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch deleted file mode 100644 index 1a4f2d0fbc9e83c52a46f82dd5abfc6bdf00af82..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:07:15.518446000 +0100 -+++ setup.py 2021-01-20 10:07:51.389877000 +0100 -@@ -53,7 +53,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - if sys.platform == 'win32': diff --git a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch b/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch deleted file mode 100644 index e0a3e0ccf186aef0a4c22892b79ae0db810f351c..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:12:33.326687000 +0100 -+++ setup.py 2021-01-20 10:12:51.492198482 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb deleted file mode 100644 index 21f09deef840d0cde98008b123946d7ce310091a..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb +++ /dev/null @@ -1,56 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = 
"""ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-NVHPC-20.7-GCC-9.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-NVHPC-20.7-GCC-9.3.0.eb deleted file mode 100644 index c53c567ec34380c76ed54376ca4cbf46d96b337d..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-NVHPC-20.7-GCC-9.3.0.eb +++ /dev/null @@ -1,58 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '20.7-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# Needed since nvfortran can't deal with --allow-shlib-undefined, but pgf77 can -preconfigopts = 'unset F77 && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. 
One can disable it by setting
-# ROMIO_FSTYPE_FORCE="ufs:", but then we lose IME support
-mpich_opts = '--enable-static --with-file-system=ufs --enable-romio'
-
-threaded = False
-
-cuda = True
-
-# use a more explicit module name to help our naming scheme
-modaltsoftname = 'ParaStationMPI'
-
-# Add a family for our naming scheme
-modluafooter = '''
-add_property("arch","gpu")
-family("mpi")
-if not ( isloaded("mpi-settings") ) then
-    load("mpi-settings")
-end
-'''
-
-moduleclass = 'mpi'
diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-NVHPC-20.9-GCC-9.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-NVHPC-20.9-GCC-9.3.0.eb
deleted file mode 100644
index 976c480ac3330a2c9b93187b67c966dd1861e843..0000000000000000000000000000000000000000
--- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-NVHPC-20.9-GCC-9.3.0.eb
+++ /dev/null
@@ -1,58 +0,0 @@
-name = 'psmpi'
-version = '5.4.7-1'
-
-homepage = 'https://github.com/ParaStation/psmpi2'
-description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation,
-based on MPICH v3. It provides extra low level communication libraries and integration with
-various batch systems for tighter process control.
-"""
-
-site_contacts = 'sc@fz-juelich.de'
-
-toolchain = {'name': 'NVHPC', 'version': '20.9-GCC-9.3.0'}
-
-sources = ['%(version)s.tar.gz']
-source_urls = ['https://github.com/ParaStation/psmpi/archive/']
-
-builddependencies = [('CUDA', '11.0', '', SYSTEM)]
-dependencies = [
-    ('pscom', '5.4-default', '', SYSTEM),
-    # needed due to the inclusion of hwloc
-    ('libxml2', '2.9.10')
-]
-
-patches = [
-    'psmpi_shebang.patch',
-    # We don't have IME in HDFML so we skip this
-    # 'psmpi-5.4.6-1_ime.patch'
-]
-
-# We don't have IME in HDFML so we skip this
-# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio'
-#
-# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && '
-# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && '
-# Needed since nvfortran can't deal with --allow-shlib-undefined, but pgf77 can
-preconfigopts = 'unset F77 && '
-# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio'
-# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting
-# ROMIO_FSTYPE_FORCE="ufs:", but then we lose IME support
-mpich_opts = '--enable-static --with-file-system=ufs --enable-romio'
-
-threaded = False
-
-cuda = True
-
-# use a more explicit module name to help our naming scheme
-modaltsoftname = 'ParaStationMPI'
-
-# Add a family for our naming scheme
-modluafooter = '''
-add_property("arch","gpu")
-family("mpi")
-if not ( isloaded("mpi-settings") ) then
-    load("mpi-settings")
-end
-'''
-
-moduleclass = 'mpi'
diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb
deleted file mode 100644
index 3f936a6620c95ebf2a9c9ebd8793d94d8374191f..0000000000000000000000000000000000000000
--- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb
+++ /dev/null
@@ -1,57 +0,0 @@
-name = 'psmpi'
-version = '5.4.7-1'
-versionsuffix = '-mt'
-
-homepage = 'https://github.com/ParaStation/psmpi2'
-description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation,
-based on MPICH v3. It provides extra low level communication libraries and integration with
-various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -threaded = True - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index f6eec5379268acf800dc085c46de1b964d446afd..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,56 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. 
One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.8-1-NVHPC-21.1-GCC-9.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.8-1-NVHPC-21.1-GCC-9.3.0.eb deleted file mode 100644 index 8a7ee6fd6c28e32a84f5143489d45ac7a186fa68..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.8-1-NVHPC-21.1-GCC-9.3.0.eb +++ /dev/null @@ -1,58 +0,0 @@ -name = 'psmpi' -version = '5.4.8-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '21.1-GCC-9.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -builddependencies = [('CUDA', '11.0', '', SYSTEM)] -dependencies = [ - ('pscom', '5.4-default', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# Needed since nvfortran can't deal with --allow-shlib-undefined, but pgf77 can -preconfigopts = 'unset F77 && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -threaded = False - -cuda = True - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings/CUDA") ) then - load("mpi-settings/CUDA") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb deleted file mode 100644 index 64310b8b00038e262035a22a94869a631b32dd9f..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -builddependencies = [('CUDA', local_cuda_ver, '', SYSTEM)] -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -# Needed due to changes in GCC10. See https://github.com/pmodels/mpich/issues/4300 -preconfigopts = 'export FCFLAGS="-fallow-argument-mismatch $FCFLAGS" && ' -preconfigopts += 'export FFLAGS="-fallow-argument-mismatch $FFLAGS" && ' - -threaded = False - -cuda = True - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-NVHPC-21.5-GCC-10.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-NVHPC-21.5-GCC-10.3.0.eb deleted file mode 100644 index 36e6ee76aa8c897dbc0bab63b9cdbdb1c0119c96..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-NVHPC-21.5-GCC-10.3.0.eb +++ /dev/null @@ -1,56 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'NVHPC', 'version': '21.5-GCC-10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -builddependencies = [('CUDA', local_cuda_ver, '', SYSTEM)] -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. 
One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -threaded = False - -cuda = True - -# We need this here since the hook does not consider the compiler toolchain when injecting these vars -# Add a family for our naming scheme -modluafooter = ''' -add_property("arch","gpu") -family("mpi") -if not ( isloaded("mpi-settings/CUDA") ) then - load("mpi-settings/CUDA") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb deleted file mode 100644 index 08ce72279cdb1bff43d11d04c05886bfc8e320fb..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb +++ /dev/null @@ -1,47 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' -versionsuffix = '-mt' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -builddependencies = [('CUDA', local_cuda_ver, '', SYSTEM)] -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -threaded = True - -cuda = True - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 66b9cdd051c9efb287f1e2452d911b260da55ad9..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,46 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -sources = ['%(version)s.tar.gz'] -source_urls = ['https://github.com/ParaStation/psmpi/archive/'] - -local_cuda_ver = '11.3' - -builddependencies = [('CUDA', local_cuda_ver, '', SYSTEM)] -dependencies = [ - ('pscom', f'5.4-default-CUDA-{local_cuda_ver}', '', SYSTEM), - # needed due to the inclusion of hwloc - ('libxml2', '2.9.10') -] - -patches = [ - 'psmpi_shebang.patch', - # We don't have IME in HDFML so we skip this - # 'psmpi-5.4.6-1_ime.patch' -] - -# We don't have IME in HDFML so we skip this -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# -# preconfigopts = 'export CFLAGS="-I/opt/ddn/ime/include $CFLAGS" && ' -# preconfigopts += 'export LDFLAGS="$LDFLAGS -L/opt/ddn/ime/lib -lim_client" && ' -# mpich_opts = '--enable-static --with-file-system=ime+ufs+gpfs --enable-romio' -# We disable gpfs support, since it seems to be problematic under some circumstances. One can disable it by setting -# ROMIO_FSTYPE_FORCE="ufs:", but then we loose IME support -mpich_opts = '--enable-static --with-file-system=ufs --enable-romio' - -threaded = False - -cuda = True - -moduleclass = 'mpi' diff --git a/Overlays/hdfml_overlay/u/UCX/UCX-1.8.1.eb b/Overlays/hdfml_overlay/u/UCX/UCX-1.8.1.eb deleted file mode 100644 index df14257476b2a77486b622bf5d35c9ed4795803a..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/u/UCX/UCX-1.8.1.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.8.1' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -# configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with 
GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/hdfml_overlay/u/UCX/UCX-1.9.0.eb b/Overlays/hdfml_overlay/u/UCX/UCX-1.9.0.eb deleted file mode 100644 index cdf6510cf9b9cd517a3d306120d6215db617c86c..0000000000000000000000000000000000000000 --- a/Overlays/hdfml_overlay/u/UCX/UCX-1.9.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.9.0' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/jurecabooster_overlay/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2020.eb b/Overlays/jurecabooster_overlay/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2020.eb deleted file mode 100644 index d50d2f0981d9a8eb3960760eb560a8244c0996b1..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/b/Boost.Python/Boost.Python-1.74.0-gpsmpi-2020.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'EB_Boost' - -name = 'Boost.Python' -version = '1.74.0' - -homepage = 'https://boostorg.github.io/python' -description = """Boost.Python is a C++ library which enables seamless interoperability between C++ - and the Python programming language.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = 
['boost_%s.tar.gz' % '_'.join(version.split('.'))] -patches = ['Boost-1.71.0_fix-Python3.patch'] - -dependencies = [ - ('Boost', version), - ('Python', '3.8.5') -] - -# With this version of GCC, bootstrap generates a b2 binary with illegal instructions on KNL, if the optimization flags -# are left enabled. This is a bit unsettling, since we could have more latent issues -preconfigopts = 'unset F90FLAGS FCFLAGS FFLAGS OPTFLAGS && export CXXFLAGS="-std=c++11" && ' - -only_python_bindings = True - -moduleclass = 'lib' diff --git a/Overlays/jurecabooster_overlay/b/Boost/Boost-1.74.0-gpsmpi-2020.eb b/Overlays/jurecabooster_overlay/b/Boost/Boost-1.74.0-gpsmpi-2020.eb deleted file mode 100644 index c4cf7e557724ee571fffb6b345b56f0788fef892..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/b/Boost/Boost-1.74.0-gpsmpi-2020.eb +++ /dev/null @@ -1,31 +0,0 @@ -name = 'Boost' -version = '1.74.0' - -homepage = 'https://www.boost.org/' -description = """Boost provides free peer-reviewed portable C++ source libraries.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True, 'usempi': True, 'cstd': 'c++11'} - -source_urls = [ - 'https://boostorg.jfrog.io/artifactory/main/release/%(version)s/source/'] -sources = ['%%(namelower)s_%s.tar.gz' % '_'.join(version.split('.'))] - -dependencies = [ - ('bzip2', '1.0.8'), - ('zlib', '1.2.11'), - ('XZ', '5.2.5'), - ('ICU', '67.1'), -] - -# With this version of GCC, bootstrap generates a b2 binary with illegal instructions on KNL, if the optimization flags -# are left enabled. This is a bit unsettling, since we could have more latent issues -preconfigopts = 'unset F90FLAGS FCFLAGS FFLAGS OPTFLAGS && export CXXFLAGS="-std=c++11" && ' -configopts = '--without-libraries=python' - -# also build boost_mpi -boost_mpi = True - -moduleclass = 'devel' diff --git a/Overlays/jurecabooster_overlay/g/GCC/GCC-10.3.0.eb b/Overlays/jurecabooster_overlay/g/GCC/GCC-10.3.0.eb deleted file mode 100644 index 388738574ebebbc6fbddc45bb03c858e7f904082..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/g/GCC/GCC-10.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'Bundle' - -name = 'GCC' -version = '10.3.0' - -homepage = 'https://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -dependencies = [ - ('GCCcore', version), - # binutils built on top of GCCcore, which was built on top of (dummy-built) binutils - ('binutils', '2.36.1', '', ('GCCcore', version)), -] - -altroot = 'GCCcore' -altversion = 'GCCcore' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = 'family("compiler")' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' diff --git a/Overlays/jurecabooster_overlay/g/GCC/GCC-9.3.0.eb b/Overlays/jurecabooster_overlay/g/GCC/GCC-9.3.0.eb deleted file mode 100644 index 74b95b8771a9e8b0b3cd0e749739f121644d9df0..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/g/GCC/GCC-9.3.0.eb +++ /dev/null @@ -1,31 +0,0 @@ -easyblock = 'Bundle' - -name = 'GCC' -version = '9.3.0' - -homepage = 'http://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -dependencies = [ - ('GCCcore', version), - # binutils built on top of GCCcore, which was built on top of (dummy-built) binutils - ('binutils', '2.34', '', ('GCCcore', version)), -] - -altroot = 'GCCcore' -altversion = 'GCCcore' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = 'family("compiler")' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -# this bundle serves as a compiler toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' diff --git a/Overlays/jurecabooster_overlay/g/GCCcore/GCCcore-10.3.0.eb b/Overlays/jurecabooster_overlay/g/GCCcore/GCCcore-10.3.0.eb deleted file mode 100644 index 8831d7f1db86a8ab608a98140d8079ce87f5c51f..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/g/GCCcore/GCCcore-10.3.0.eb +++ /dev/null @@ -1,74 +0,0 @@ -easyblock = 'EB_GCC' - -name = 'GCCcore' -version = '10.3.0' - -homepage = 'https://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = [ - # GCC auto-resolving HTTP mirror - 'https://ftpmirror.gnu.org/gnu/gcc/gcc-%(version)s', - 'https://ftpmirror.gnu.org/gnu/gmp', # idem for GMP - 'https://ftpmirror.gnu.org/gnu/mpfr', # idem for MPFR - 'https://ftpmirror.gnu.org/gnu/mpc', # idem for MPC - 'ftp://gcc.gnu.org/pub/gcc/infrastructure/', # GCC dependencies - 'http://gcc.cybermirror.org/infrastructure/', # HTTP mirror for GCC dependencies - 'http://isl.gforge.inria.fr/', # original HTTP source for ISL - 'https://sourceware.org/pub/newlib/', # for newlib - 'https://github.com/MentorEmbedded/nvptx-tools/archive', # for nvptx-tools -] -sources = [ - 'gcc-%(version)s.tar.gz', - 'gmp-6.2.1.tar.bz2', - 'mpfr-4.1.0.tar.bz2', - 'mpc-1.2.1.tar.gz', - 'isl-0.23.tar.bz2', - 'newlib-4.1.0.tar.gz', - {'download_filename': 'd0524fb.tar.gz', - 'filename': 'nvptx-tools-20210115.tar.gz'}, -] -patches = [ - 'GCCcore-6.2.0-fix-find-isl.patch', - 'GCCcore-9.3.0_gmp-c99.patch', - 'gcc_10.3.0_pt.patch', # https://github.com/NVIDIA/nccl/issues/494 -] -checksums = [ - '8fcf994811ad4e5c7ac908e8cf62af2c1982319e5551f62ae72016064dacdf16', # gcc-10.3.0.tar.gz - 'eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c', # gmp-6.2.1.tar.bz2 - 'feced2d430dd5a97805fa289fed3fc8ff2b094c02d05287fd6133e7f1f0ec926', # mpfr-4.1.0.tar.bz2 - '17503d2c395dfcf106b622dc142683c1199431d095367c6aacba6eec30340459', # mpc-1.2.1.tar.gz - 'c58922c14ae7d0791a77932f377840890f19bc486b653fa64eba7f1026fb214d', # isl-0.23.tar.bz2 - 'f296e372f51324224d387cc116dc37a6bd397198756746f93a2b02e9a5d40154', # newlib-4.1.0.tar.gz - # nvptx-tools-20210115.tar.gz - '466abe1cef9cf294318ecb3c221593356f7a9e1674be987d576bc70d833d84a2', - # GCCcore-6.2.0-fix-find-isl.patch - '5ad909606d17d851c6ad629b4fddb6c1621844218b8d139fed18c502a7696c68', - # GCCcore-9.3.0_gmp-c99.patch - '0e135e1cc7cec701beea9d7d17a61bab34cfd496b4b555930016b98db99f922e', -] - -builddependencies = [ - ('M4', '1.4.18'), - ('binutils', '2.36.1'), -] - -languages = ['c', 'c++', 'fortran'] - -withisl = True -withnvptx = False - -# Perl is only required when building with NVPTX support -if withnvptx: - osdependencies = ['perl'] - -# Make sure we replace the system cc with gcc with an alias -modaliases = {'cc': 'gcc'} - -moduleclass = 'compiler' diff --git a/Overlays/jurecabooster_overlay/g/GCCcore/GCCcore-9.3.0.eb b/Overlays/jurecabooster_overlay/g/GCCcore/GCCcore-9.3.0.eb deleted file mode 100644 index 1ec4b5d7f8c3a96261ce03a5b3d1ec15f5aa308c..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/g/GCCcore/GCCcore-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'EB_GCC' - -name = 'GCCcore' -version = '9.3.0' -local_newlib_version = '3.3.0' - -homepage = 'https://gcc.gnu.org/' -description = """The GNU Compiler Collection includes front ends for C, C++, Objective-C, Fortran, Java, and Ada, -as well as libraries for these languages (libstdc++, libgcj,...). 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = [ - 'https://ftpmirror.gnu.org/gnu/gcc/gcc-%(version)s', # GCC auto-resolving HTTP mirror - 'https://ftpmirror.gnu.org/gnu/gmp', # idem for GMP - 'https://ftpmirror.gnu.org/gnu/mpfr', # idem for MPFR - 'https://ftpmirror.gnu.org/gnu/mpc', # idem for MPC - 'ftp://gcc.gnu.org/pub/gcc/infrastructure/', # GCC dependencies - 'http://gcc.cybermirror.org/infrastructure/', # HTTP mirror for GCC dependencies - 'http://isl.gforge.inria.fr/', # original HTTP source for ISL - 'https://sourceware.org/pub/newlib/', # for newlib - 'https://github.com/MentorEmbedded/nvptx-tools/archive', # for nvptx-tools -] -sources = [ - 'gcc-%(version)s.tar.gz', - 'gmp-6.2.0.tar.bz2', - 'mpfr-4.0.2.tar.bz2', - 'mpc-1.1.0.tar.gz', - 'isl-0.22.1.tar.bz2', - 'newlib-3.3.0.tar.gz', -] -patches = [ - 'GCCcore-6.2.0-fix-find-isl.patch', - 'GCCcore-8.3.0_fix-xsmin-ppc.patch', - 'GCCcore-%(version)s_gmp-c99.patch', - 'GCCcore-%(version)s_vect_broadcasts_masmintel.patch', -] -checksums = [ - '5258a9b6afe9463c2e56b9e8355b1a4bee125ca828b8078f910303bc2ef91fa6', # gcc-9.3.0.tar.gz - 'f51c99cb114deb21a60075ffb494c1a210eb9d7cb729ed042ddb7de9534451ea', # gmp-6.2.0.tar.bz2 - 'c05e3f02d09e0e9019384cdd58e0f19c64e6db1fd6f5ecf77b4b1c61ca253acc', # mpfr-4.0.2.tar.bz2 - '6985c538143c1208dcb1ac42cedad6ff52e267b47e5f970183a3e75125b43c2e', # mpc-1.1.0.tar.gz - '1a668ef92eb181a7c021e8531a3ca89fd71aa1b3744db56f68365ab0a224c5cd', # isl-0.22.1.tar.bz2 - '58dd9e3eaedf519360d92d84205c3deef0b3fc286685d1c562e245914ef72c66', # newlib-3.3.0.tar.gz - '5ad909606d17d851c6ad629b4fddb6c1621844218b8d139fed18c502a7696c68', # GCCcore-6.2.0-fix-find-isl.patch - 'bea1bce8f50ea6d51b038309eb61dec00a8681fb653d211c539be80f184609a3', # GCCcore-8.3.0_fix-xsmin-ppc.patch - '0e135e1cc7cec701beea9d7d17a61bab34cfd496b4b555930016b98db99f922e', # GCCcore-9.3.0_gmp-c99.patch - 'a32ac9c7d999a8b91bf93dba6a9d81b6ff58b3c89c425ff76090cbc90076685c', # GCCcore-9.3.0_vect_broadcasts_masmintel.patch -] - -builddependencies = [ - ('M4', '1.4.18'), - ('binutils', '2.34'), -] - -languages = ['c', 'c++', 'fortran'] - -withisl = True -withnvptx = False - -# Make sure we replace the system cc with gcc with an alias -modaliases = {'cc': 'gcc'} - -moduleclass = 'compiler' diff --git a/Overlays/jurecabooster_overlay/h/hwloc/hwloc-2.2.0-GCCcore-9.3.0.eb b/Overlays/jurecabooster_overlay/h/hwloc/hwloc-2.2.0-GCCcore-9.3.0.eb deleted file mode 100644 index f4a477b041e07d1c7b4bf0a2731f55a7367eb69d..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/h/hwloc/hwloc-2.2.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'hwloc' -version = '2.2.0' - -homepage = 'http://www.open-mpi.org/projects/hwloc/' - -description = """ - The Portable Hardware Locality (hwloc) software package provides a portable - abstraction (across OS, versions, architectures, ...) of the hierarchical - topology of modern architectures, including NUMA memory nodes, sockets, shared - caches, cores and simultaneous multithreading. It also gathers various system - attributes such as cache and memory information as well as the locality of I/O - devices such as network interfaces, InfiniBand HCAs or GPUs. It primarily - aims at helping applications with gathering information about modern computing - hardware so as to exploit it accordingly and efficiently. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -source_urls = ['https://www.open-mpi.org/software/hwloc/v%(version_major_minor)s/downloads/'] -sources = [SOURCE_TAR_GZ] - -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - ('numactl', '2.0.13', '', SYSTEM), - ('libxml2', '2.9.10'), - ('libpciaccess', '0.16'), -] - -configopts = "--enable-libnuma=$EBROOTNUMACTL --enable-opencl" -configopts += "--disable-cairo --disable-gl --disable-libudev " - -sanity_check_paths = { - 'files': ['bin/lstopo', 'include/hwloc/linux.h', - 'lib/libhwloc.%s' % SHLIB_EXT], - 'dirs': ['share/man/man3'], -} -sanity_check_commands = ['lstopo'] - -modluafooter = ''' -add_property("arch","gpu") -''' -moduleclass = 'system' diff --git a/Overlays/jurecabooster_overlay/i/impi-settings/impi-settings-2019-plain.eb b/Overlays/jurecabooster_overlay/i/impi-settings/impi-settings-2019-plain.eb deleted file mode 100644 index a6977adf3fc0ba49e6dc4fa056ba2728ca6b5d32..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/i/impi-settings/impi-settings-2019-plain.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2019' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default IntelMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Overlays/jurecabooster_overlay/i/impi-settings/impi-settings-2021-plain.eb b/Overlays/jurecabooster_overlay/i/impi-settings/impi-settings-2021-plain.eb deleted file mode 100644 index 80e289b376fe16a97352f5f8566bb0a54ff97d78..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/i/impi-settings/impi-settings-2021-plain.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'impi-settings' -version = '2021' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default IntelMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'I_MPI_PMI_VALUE_LENGTH_MAX': '900', - # Needed for PSM and harmless for InfiniBand. For ParaStation it is set on the pscom module - 'HFI_NO_CPUAFFINITY': 'YES', -} - -moduleclass = 'system' diff --git a/Overlays/jurecabooster_overlay/i/impi/impi-2019.8.254-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/jurecabooster_overlay/i/impi/impi-2019.8.254-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 1c9661c2bb770cca75e24786b22970611b362a70..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/i/impi/impi-2019.8.254-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,44 +0,0 @@ -name = 'impi' -version = '2019.8.254' - -homepage = 'http://software.intel.com/en-us/intel-mpi-library/' -description = """The Intel(R) MPI Library for Linux* OS is a multi-fabric message - passing library based on ANL MPICH2 and OSU MVAPICH2. The Intel MPI Library for - Linux OS implements the Message Passing Interface, version 2 (MPI-2) specification. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'version': '2020.2.254-GCC-9.3.0', 'name': 'iccifort'} - -sources = ['l_mpi_%(version)s.tgz'] - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'IntelMPI' -# Add a family for our naming scheme -modluafooter = 'family("mpi")' -dontcreateinstalldir = 'True' -# set up all the mpi commands to work as expected -set_mpi_wrappers_all = 'True' - -postinstallcmds = [ - "ln -s %(installdir)s/intel64/lib/release/libmpi.so %(installdir)s/intel64/lib/", - "ln -s %(installdir)s/intel64/lib %(installdir)s", - "ln -s %(installdir)s/intel64/include %(installdir)s", - "rm -Rf %(installdir)s/bin", - "ln -s %(installdir)s/intel64/bin %(installdir)s", -] - -modextrapaths = { - 'CLASSPATH': 'intel64/lib/mpi.jar', -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/i/impi/impi-2021.2.0-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Overlays/jurecabooster_overlay/i/impi/impi-2021.2.0-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index fa7288d8a0c73550a7cfac461b6585c128ea1dbf..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/i/impi/impi-2021.2.0-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,18 +0,0 @@ -name = 'impi' -version = '2021.2.0' - -homepage = 'https://software.intel.com/content/www/us/en/develop/tools/mpi-library.html' -description = "Intel MPI Library, compatible with MPICH ABI" - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/17729/'] -sources = ['l_mpi_oneapi_p_%(version)s.215_offline.sh'] -checksums = ['d0d4cdd11edaff2e7285e38f537defccff38e37a3067c02f4af43a3629ad4aa3'] - -dontcreateinstalldir = 'True' -# set up all the mpi commands to work as expected -set_mpi_wrappers_all = 'True' - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/l/libfabric/libfabric-1.12.1-GCCcore-9.3.0.eb b/Overlays/jurecabooster_overlay/l/libfabric/libfabric-1.12.1-GCCcore-9.3.0.eb deleted file mode 100644 index 137a9a69cab2496058f512acbedd4163309d8600..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/l/libfabric/libfabric-1.12.1-GCCcore-9.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'libfabric' -version = '1.12.1' - -homepage = 'https://ofiwg.github.io/libfabric/' -description = """ -Libfabric is a core component of OFI. It is the library that defines and exports -the user-space API of OFI, and is typically the only software that applications -deal with directly. It works in conjunction with provider libraries, which are -often integrated directly into libfabric. 
-"""
-
-site_contacts = 'd.alvarez@fz-juelich.de'
-
-toolchain = {'name': 'GCCcore', 'version': '9.3.0'}
-toolchainopts = {'pic': True}
-
-github_account = 'ofiwg'
-source_urls = ['https://github.com/ofiwg/%(name)s/releases/download/v%(version)s']
-sources = [SOURCE_TAR_BZ2]
-
-builddependencies = [
-    ('binutils', '2.34'),
-    ('pkg-config', '0.29.2'),
-]
-
-osdependencies = [OS_PKG_IBVERBS_DEV]
-
-# Disable deprecated "sockets" provider
-configopts = "--disable-sockets"
-
-sanity_check_paths = {
-    'files': ['bin/fi_info', 'bin/fi_pingpong', 'bin/fi_strerror'] +
-             ['lib/libfabric.%s' % x for x in ['a', SHLIB_EXT]],
-    'dirs': ['include/rdma', 'lib/pkgconfig', 'share']
-}
-
-sanity_check_commands = ['fi_info']
-
-moduleclass = 'lib'
diff --git a/Overlays/jurecabooster_overlay/n/NAMD/NAMD-2.14-iimpi-2020.eb b/Overlays/jurecabooster_overlay/n/NAMD/NAMD-2.14-iimpi-2020.eb
deleted file mode 100644
index f7753763ee90d07e5a0211af2e4ad3d876619caf..0000000000000000000000000000000000000000
--- a/Overlays/jurecabooster_overlay/n/NAMD/NAMD-2.14-iimpi-2020.eb
+++ /dev/null
@@ -1,49 +0,0 @@
-name = 'NAMD'
-version = '2.14'
-
-homepage = 'http://www.ks.uiuc.edu/Research/namd/'
-description = """NAMD is a parallel molecular dynamics code designed for high-performance simulation of large
-biomolecular systems.
-"""
-
-site_contacts = 'a.strube@fz-juelich.de'
-
-toolchain = {'name': 'iimpi', 'version': '2020'}
-toolchainopts = {'opt': True, 'pic': True, 'usempi': True, 'cstd': 'c++11'}
-
-runtest = False
-
-sources = ['NAMD_%(version)s_Source.tar.gz']
-
-patches = ['namd-2.14-fix-gnu-casting.patch']
-
-group = "namd"
-
-dependencies = [
-    ('Tcl', '8.6.10'),
-    ('FFTW', '3.3.8'),
-]
-
-charm_arch = 'mpi-linux-x86_64'
-
-namd_cfg_opts = " --with-tcl --tcl-prefix $EBROOTTCL "
-
-namd_basearch = 'Linux-KNL'
-
-prebuildopts = 'echo "TCLLIB=-L\$(TCLDIR)/lib -ltcl8.6 -ldl -lpthread" >> Make.config && '
-# Using Linux-KNL is not enough, as CXX gets redefined and therefore -DNAMD_KNL (defined
-# in arch/Linux-KNL-icc.arch) gets lost
-prebuildopts += 'echo "CXX+=-DNAMD_KNL" >> Make.config && '
-prebuildopts += 'echo "COPTS+=-DNAMD_DISABLE_SSE" >> Make.config && '
-prebuildopts += 'echo "CXXOPTS+=-DNAMD_DISABLE_SSE" >> Make.config && '
-
-# On JURECA we should use srun. charmrun results in serial executions
-postinstallcmds = ['rm %(installdir)s/charmrun']
-
-# We must overwrite the default sanity check, otherwise it fails because it can't find charmrun
-sanity_check_paths = {
-    'files': ['flipbinpdb', 'flipdcd', 'namd%s' % version.split('.')[0], 'psfgen'],
-    'dirs': ['inc'],
-}
-
-moduleclass = 'chem'
diff --git a/Overlays/jurecabooster_overlay/n/NAMD/NAMD-2.14-ipsmpi-2020.eb b/Overlays/jurecabooster_overlay/n/NAMD/NAMD-2.14-ipsmpi-2020.eb
deleted file mode 100644
index 6af5f8c26d39407e2eb5c3ee6925499582c5c940..0000000000000000000000000000000000000000
--- a/Overlays/jurecabooster_overlay/n/NAMD/NAMD-2.14-ipsmpi-2020.eb
+++ /dev/null
@@ -1,49 +0,0 @@
-name = 'NAMD'
-version = '2.14'
-
-homepage = 'http://www.ks.uiuc.edu/Research/namd/'
-description = """NAMD is a parallel molecular dynamics code designed for high-performance simulation of large
-biomolecular systems. 
-"""
-
-site_contacts = 'a.strube@fz-juelich.de'
-
-toolchain = {'name': 'ipsmpi', 'version': '2020'}
-toolchainopts = {'opt': True, 'pic': True, 'usempi': True, 'cstd': 'c++11'}
-
-runtest = False
-
-sources = ['NAMD_%(version)s_Source.tar.gz']
-
-patches = ['namd-2.14-fix-gnu-casting.patch']
-
-group = "namd"
-
-dependencies = [
-    ('Tcl', '8.6.10'),
-    ('FFTW', '3.3.8'),
-]
-
-charm_arch = 'mpi-linux-x86_64'
-
-namd_cfg_opts = " --with-tcl --tcl-prefix $EBROOTTCL "
-
-namd_basearch = 'Linux-KNL'
-
-prebuildopts = 'echo "TCLLIB=-L\$(TCLDIR)/lib -ltcl8.6 -ldl -lpthread" >> Make.config && '
-# Using Linux-KNL is not enough, as CXX gets redefined and therefore -DNAMD_KNL (defined
-# in arch/Linux-KNL-icc.arch) gets lost
-prebuildopts += 'echo "CXX+=-DNAMD_KNL" >> Make.config && '
-prebuildopts += 'echo "COPTS+=-DNAMD_DISABLE_SSE" >> Make.config && '
-prebuildopts += 'echo "CXXOPTS+=-DNAMD_DISABLE_SSE" >> Make.config && '
-
-# On JURECA we should use srun. charmrun results in serial executions
-postinstallcmds = ['rm %(installdir)s/charmrun']
-
-# We must overwrite the default sanity check, otherwise it fails because it can't find charmrun
-sanity_check_paths = {
-    'files': ['flipbinpdb', 'flipdcd', 'namd%s' % version.split('.')[0], 'psfgen'],
-    'dirs': ['inc'],
-}
-
-moduleclass = 'chem'
diff --git a/Overlays/jurecabooster_overlay/o/OpenGL/OpenGL-2020-GCCcore-10.3.0.eb b/Overlays/jurecabooster_overlay/o/OpenGL/OpenGL-2020-GCCcore-10.3.0.eb
deleted file mode 100644
index a4cddfc4501b3d3564ae967ad36172284a225a14..0000000000000000000000000000000000000000
--- a/Overlays/jurecabooster_overlay/o/OpenGL/OpenGL-2020-GCCcore-10.3.0.eb
+++ /dev/null
@@ -1,207 +0,0 @@
-easyblock = 'Bundle'
-
-name = 'OpenGL'
-version = '2020'
-
-homepage = 'http://www.opengl.org/'
-description = """
-Open Graphics Library (OpenGL) is a cross-language, cross-platform application programming interface (API) for rendering
-2D and 3D vector graphics. Mesa is an open-source implementation of the OpenGL specification - a system for rendering
-interactive 3D graphics. NVIDIA supports OpenGL and a complete set of OpenGL extensions, designed to give a maximum
-performance on NVIDIA GPUs.
-
-This is a GL vendor neutral dispatch (GLVND) installation with Mesa and NVIDIA in the same lib-directory. Mesa or NVIDIA
-OpenGL is set individually for each XScreen.
-"""
-
-site_contacts = 'Jens Henrik Goebbert <j.goebbert@fz-juelich.de>'
-
-toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
-# swr detects and builds parts specific for AVX and AVX2. If we use
-# -xHost, this always gets overwritten and will fail. 
-toolchainopts = {'optarch': False} - -builddependencies = [ - ('Python', '3.8.5'), - ('binutils', '2.36.1'), - ('flex', '2.6.4'), - ('Bison', '3.7.6'), - ('Autotools', '20200321'), - ('pkg-config', '0.29.2'), - ('expat', '2.2.9'), - ('libxml2', '2.9.10'), - ('Meson', '0.55.0', '-Python-%(pyver)s'), - ('Ninja', '1.10.0'), - ('CMake', '3.18.0', '', SYSTEM), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('nettle', '3.6'), - ('libdrm', '2.4.106'), - ('LLVM', '10.0.1'), - ('X11', '20200222'), - ('libunwind', '1.4.0'), -] - -default_easyblock = 'ConfigureMake' - -default_component_specs = { - 'sources': [SOURCE_TAR_GZ], - 'start_dir': '%(name)s-%(version)s', -} - -local_pkg_config = ('export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib/pkgconfig && ' - 'export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:%(installdir)s/lib64/pkgconfig && ') - -components = [ - # A vendor neutral dispatch layer - ('libglvnd', '1.3.2', { - 'source_urls': [ - 'https://gitlab.freedesktop.org/glvnd/libglvnd/-/archive/v%(version)s/' - ], - 'sources': ['%(name)s-v%(version)s.tar.gz'], - 'start_dir': '%(name)s-v%(version)s', - 'checksums': ['6332c27f4c792b09a3eb1d7ae18c2d6ff6a0acaf3a746489caf859e659fca2f7'], - 'preconfigopts': './autogen.sh && ' - }), - # Mesa for software rendering, not hardware rendering. - ('Mesa', '20.1.4', { - # We build: - # - llvmpipe: the high-performance Gallium LLVM driver (only possible with glx=gallium-xlib) - # - swr: Intel's OpenSWR - 'easyblock': 'MesonNinja', - 'source_urls': [ - 'https://mesa.freedesktop.org/archive/', - 'https://mesa.freedesktop.org/archive/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/older-versions/%(version_major)s.x/%(version)s', - 'ftp://ftp.freedesktop.org/pub/mesa/older-versions/%(version_major)s.x', - ], - 'sources': [SOURCELOWER_TAR_XZ], - 'checksums': [ - '6800271c2be2a0447510eb4e9b67edd9521859a4d565310617c4b359eb6799fe', - ], - 'start_dir': '%(namelower)s-%(version)s', - 'separate_build_dir': True, - 'preconfigopts': local_pkg_config, - 'configopts': (' -D libdir=%(installdir)s/lib' - ' -D gbm=true' - ' -D glx=auto' - ' -D osmesa=gallium' - ' -D llvm=true' - ' -D shared-llvm=true' - ' -D dri-drivers=""' - ' -D gallium-drivers="swr,swrast"' - ' -D swr-arches=avx,avx2,skx,knl' - ' -D vulkan-drivers=""' - ' -D platforms=x11,drm,surfaceless' - ' -D glvnd=true' - ' -D libunwind=true' - ' -D egl=true' - ' -D gles1=true -Dgles2=true' - ' -D shared-glapi=true' - ' -D gallium-omx=disabled' - ' -D gallium-media=omx,va,vdpau,xvmc' - ' -D buildtype=release'), - }), - # OpenGL Utility Library - offers higher level GL-graphics functions - ('glu', '9.0.1', { - 'preconfigopts': local_pkg_config, - 'source_urls': [ - 'ftp://ftp.freedesktop.org/pub/mesa/glu/' - ], - 'sources': [ - 'glu-%(version)s.tar.gz' - ], - 'checksums': [ - 'f6f484cfcd51e489afe88031afdea1e173aa652697e4c19ddbcb8260579a10f7', - ], - }), - # OpenGL Extension Wrangler Library - determines which OpenGL extensions are supported at run-time - # This is just GLEW for GLX (which requires DISPLAY to be set) and not GLEW for EGL as GLEW selects GLX/EGL at - # compile-time and not run-time (https://github.com/nigels-com/glew/issues/172#issuecomment-357400019) - # Compile+Load GLEW-EGL on top to enable GLEW for EGL - ('glew', '2.2.0', { - 'source_urls': [ - 'https://sourceforge.net/projects/glew/files/glew/snapshots/', - ], - 'sources': [ - 'glew-20200115.tgz', - ], - 'checksums': [ - '314219ba1db50d49b99705e8eb00e83b230ee7e2135289a00b5b570e4a4db43a', - ], - 
'skipsteps': ['configure'], - 'buildopts': ('GLEW_PREFIX=%(installdir)s GLEW_DEST=%(installdir)s LIBDIR=%(installdir)s/lib ' - 'LDFLAGS.EXTRA="-L${EBROOTX11}/lib/ -lX11" LDFLAGS.GL="-L%(installdir)s/lib -lGL"'), - 'installopts': 'GLEW_PREFIX=%(installdir)s GLEW_DEST=%(installdir)s LIBDIR=%(installdir)s/lib ', - 'install_cmd': 'make install.all ', - }), - # MESA demos - offers the important command 'eglinfo' - ('demos', '95c1a57cfdd1ef2852c828cba4659a72575c5c5d', { - 'source_urls': [ - 'https://gitlab.freedesktop.org/mesa/demos/-/archive/%(version)s/', - ], - 'sources': [SOURCELOWER_TAR_GZ], - 'checksums': [ - '7738beca8f6f6981ba04c8a22fde24d69d6b2aaab1758ac695c9475bf704249c', - ], - 'preconfigopts': ('./autogen.sh && ' + - local_pkg_config + - 'GLEW_CFLAGS="-I%(installdir)s/include/" GLEW_LIBS="-L%(installdir)s/lib/ -lGLEW -lGL" ' - 'EGL_CFLAGS="-I%(installdir)s/include/" EGL_LIBS="-L%(installdir)s/lib/ -lEGL" '), - 'configopts': '--disable-osmesa ', - }), -] - -postinstallcmds = [ - 'cd %(installdir)s/lib && ln -sf libGL.so.1.7.0 libGL.so.1', - 'rm %(installdir)s/lib/*.la', - 'cd %(installdir)s/lib && ln -sf libGLX_mesa.so.0 libGLX_indirect.so.0', - # EGL vendor ICDs - ( - '{ cat > %(installdir)s/share/glvnd/egl_vendor.d/50_mesa.json; } << \'EOF\'\n' - '{\n' - ' \"file_format_version\" : \"1.0.0\",\n' - ' \"ICD\" : {\n' - ' \"library_path\" : \"libEGL_mesa.so.0\"\n' - ' }\n' - '}\n' - 'EOF' - ), - # correct pkg-config of GLEW - 'sed -i "/^libdir=/c\libdir=\${exec_prefix}\/lib" %(installdir)s/lib/pkgconfig/glew.pc', - 'sed -i "/^prefix=/c\prefix=%(installdir)s" %(installdir)s/lib/pkgconfig/glew.pc', -] - -modextravars = { - '__EGL_VENDOR_LIBRARY_FILENAMES': '%(installdir)s/share/glvnd/egl_vendor.d/50_mesa.json', - 'EGL_PLATFORM': 'surfaceless', - 'EGL_DRIVER': 'swr', - 'EGL_LOG_LEVEL': 'fatal', - 'GALLIUM_DRIVER': 'swr', - 'KNOB_MAX_WORKER_THREADS': '65535', -} - -modluafooter = ''' -conflict("Mesa") -conflict("libGLU") -''' - -sanity_check_paths = { - 'files': [ - 'lib/libEGL_mesa.%s' % SHLIB_EXT, 'lib/libOSMesa.%s' % SHLIB_EXT, - 'lib/libGLESv1_CM.%s' % SHLIB_EXT, 'lib/libGLESv2.%s' % SHLIB_EXT, - 'include/GL/glext.h', 'include/GL/glx.h', - 'include/GL/osmesa.h', 'include/GL/gl.h', 'include/GL/glxext.h', - 'include/GLES/gl.h', 'include/GLES2/gl2.h', 'include/GLES3/gl3.h', - 'lib/libOpenGL.%s' % SHLIB_EXT, - 'lib/libGLEW.a', 'lib/libGLEW.%s' % SHLIB_EXT, - 'bin/glewinfo', 'bin/visualinfo', - 'include/GL/glew.h', 'include/GL/glxew.h', 'include/GL/wglew.h', - ], - 'dirs': [] -} - -moduleclass = 'vis' diff --git a/Overlays/jurecabooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-PSM.eb b/Overlays/jurecabooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-PSM.eb deleted file mode 100644 index b771c23e1464004212ae8ce82f9522dc71613664..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-PSM.eb +++ /dev/null @@ -1,43 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'PSM' - -homepage = '' -description = """This is a module to load the default OpenMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # 
https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': 'ofi', - 'OMPI_MCA_mtl': 'ofi', - # 'OMPI_MCA_btl_openib_allow_ib': '1', - # 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - # 'OMPI_MCA_coll': '^ml', - # 'OMPI_MCA_coll_hcoll_enable': '1', - # 'OMPI_MCA_coll_hcoll_np': '0', - # 'OMPI_MCA_pml': 'ucx', - # 'OMPI_MCA_osc': '^rdma', - # 'OMPI_MCA_opal_abort_print_stack': '1', - # 'OMPI_MCA_opal_set_max_sys_limits': '1', - # 'OMPI_MCA_opal_event_include': 'epoll', - # 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # OMPIO does not seem to work reliably on our system - 'OMPI_MCA_io': 'romio321', -} - -moduleclass = 'system' diff --git a/Overlays/jurecabooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb b/Overlays/jurecabooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb deleted file mode 100644 index 2107e78049053544c44d9cfd7ebfe02855f814a7..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb +++ /dev/null @@ -1,63 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -patches = ['psm2_pmix.patch'] - -osdependencies = [ - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('libevent', '2.1.12'), - ('libfabric', '1.12.1'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-psm2 ' -configopts += '--with-ofi=$EBROOTLIBFABRIC ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--disable-oshmem ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/jurecabooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 2c291283350f192b5de61d50b11a0041835ef243..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,63 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 
implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -patches = ['psm2_pmix.patch'] - -osdependencies = [ - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('libevent', '2.1.12'), - ('libfabric', '1.12.1'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-psm2 ' -configopts += '--with-ofi=$EBROOTLIBFABRIC ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--disable-oshmem ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/o/OpenMPI/psm2_pmix.patch b/Overlays/jurecabooster_overlay/o/OpenMPI/psm2_pmix.patch deleted file mode 100644 index 9689fbddb7690d964b1a7f41b80492a73360b397..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/o/OpenMPI/psm2_pmix.patch +++ /dev/null @@ -1,47 +0,0 @@ -diff -ruN openmpi-4.1.0rc1.orig/ompi/mca/mtl/psm2/mtl_psm2.c openmpi-4.1.0rc1.patched/ompi/mca/mtl/psm2/mtl_psm2.c ---- openmpi-4.1.0rc1.orig/ompi/mca/mtl/psm2/mtl_psm2.c 2020-07-06 23:56:33.000000000 +0200 -+++ openmpi-4.1.0rc1.patched/ompi/mca/mtl/psm2/mtl_psm2.c 2021-05-11 12:05:35.399831599 +0200 -@@ -26,6 +26,7 @@ - #include "ompi_config.h" - - #include "opal/mca/pmix/pmix.h" -+#include "opal/mca/pmix/pmix3x/pmix/include/pmix_common.h" - #include "ompi/mca/mtl/mtl.h" - #include "ompi/mca/mtl/base/mtl_base_datatype.h" - #include "opal/util/show_help.h" -@@ -102,6 +103,21 @@ - int rc; - - generated_key = getenv(OPAL_MCA_PREFIX"orte_precondition_transports"); -+ opal_process_name_t pname; -+ -+ generated_key = NULL; -+ pname.jobid = atoi(getenv("SLURM_JOBID")); -+ pname.vpid = OPAL_VPID_WILDCARD; -+ OPAL_MODEX_RECV_VALUE_OPTIONAL(rc, PMIX_CREDENTIAL, &pname, -+ (char**)&generated_key, PMIX_STRING); -+ -+ char *tmp_key; -+ if (PMIX_SUCCESS != rc || NULL == generated_key) { -+ if (NULL != (tmp_key = getenv("OMPI_MCA_orte_precondition_transports"))) { -+ generated_key = strdup(tmp_key); -+ } -+ } -+ - memset(uu, 0, sizeof(psm2_uuid_t)); - - if (!generated_key || (strlen(generated_key) != 33) || -@@ -111,10 +127,13 @@ - "no uuid present", true, - generated_key ? 
"could not be parsed from" : - "not present in", ompi_process_info.nodename); -+ free(generated_key); - return OMPI_ERROR; - - } - -+ free(generated_key); -+ - /* Handle our own errors for opening endpoints */ - psm2_error_register_handler(ompi_mtl_psm2.ep, ompi_mtl_psm2_errhandler); - diff --git a/Overlays/jurecabooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-plain.eb b/Overlays/jurecabooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-plain.eb deleted file mode 100644 index 8ee1cafa0c1c08b8f626cae99753f4de2b1c4521..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-plain.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default ParaStationMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'PSP_READAHEAD': '4096', - 'PSP_HARD_ABORT': '1', - 'HFI_NO_CPUAFFINITY': '1', -} - -moduleclass = 'system' diff --git a/Overlays/jurecabooster_overlay/p/psmpi-settings/psmpi-settings-5.4-plain.eb b/Overlays/jurecabooster_overlay/p/psmpi-settings/psmpi-settings-5.4-plain.eb deleted file mode 100644 index c364c484685775bb80f6e4de7fe0f9c942edcf2c..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi-settings/psmpi-settings-5.4-plain.eb +++ /dev/null @@ -1,24 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'plain' - -homepage = '' -description = """This is a module to load the default ParaStationMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] - -modextravars = { - 'PSP_READAHEAD': '4096', - 'PSP_HARD_ABORT': '1', - 'HFI_NO_CPUAFFINITY': '1', -} - -moduleclass = 'system' diff --git a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb b/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb deleted file mode 100644 index fb6b12ede08f54b7a0990726654d0c2098412a95..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-GCC-9.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' -local_pscom_version = '5.4.6-1_gw' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = [ - 'https://github.com/ParaStation/psmpi/archive/', - 'https://github.com/ParaStation/pscom/archive/' -] -sources = [ - '%(version)s.tar.gz', - 'pscom-%s.tar.bz2' % local_pscom_version -] - -builddependencies = [('popt', '1.16', '', True)] - -pscom_allin_path = '%%(builddir)s/pscom-%s ' % local_pscom_version -pgo = True - -patches = [ - 'psmpi_shebang.patch', -] - -mpich_opts = '--enable-static' -configopts = '--with-pscom-builtin=psm2' - -threaded = False - - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb b/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb deleted file mode 100644 index 8a8f752ec0b6ad94e519f14178364503c9f967d3..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0-mt.eb +++ /dev/null @@ -1,50 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' -versionsuffix = '-mt' -local_pscom_version = '5.4.6-1_gw' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = [ - 'https://github.com/ParaStation/psmpi/archive/', - 'https://github.com/ParaStation/pscom/archive/' -] -sources = [ - '%(version)s.tar.gz', - 'pscom-%s.tar.bz2' % local_pscom_version -] - -builddependencies = [('popt', '1.16', '', True)] - -pscom_allin_path = '%%(builddir)s/pscom-%s ' % local_pscom_version -pgo = True - -patches = [ - 'psmpi_shebang.patch', -] - -mpich_opts = '--enable-static' -configopts = '--with-pscom-builtin=psm2' - -threaded = True - - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index e27a4f32e21ae5c408a5e07858d1bd8486734797..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.7-1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,49 +0,0 @@ -name = 'psmpi' -version = '5.4.7-1' -local_pscom_version = '5.4.6-1_gw' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = [ - 'https://github.com/ParaStation/psmpi/archive/', - 'https://github.com/ParaStation/pscom/archive/' -] -sources = [ - '%(version)s.tar.gz', - 'pscom-%s.tar.bz2' % local_pscom_version -] - -builddependencies = [('popt', '1.16', '', True)] - -pscom_allin_path = '%%(builddir)s/pscom-%s ' % local_pscom_version -pgo = True - -patches = [ - 'psmpi_shebang.patch', -] - -mpich_opts = '--enable-static' -configopts = '--with-pscom-builtin=psm2' - -threaded = False - - -# use a more explicit module name to help our naming scheme -modaltsoftname = 'ParaStationMPI' - -# Add a family for our naming scheme -modluafooter = ''' -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb b/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb deleted file mode 100644 index 2246bb667725189b634e20eda05683d5cbaa71ef..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-GCC-10.3.0.eb +++ /dev/null @@ -1,41 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' -local_pscom_version = '5.4.7-1_gw' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '10.3.0'} - -source_urls = [ - 'https://github.com/ParaStation/psmpi/archive/', - 'https://github.com/ParaStation/pscom/archive/' -] -sources = [ - '%(version)s.tar.gz', - 'pscom-%s.tar.bz2' % local_pscom_version -] - -builddependencies = [('popt', '1.16', '', True)] - -pscom_allin_path = '%%(builddir)s/pscom-%s ' % local_pscom_version -pgo = True - -patches = [ - 'psmpi_shebang.patch', -] - -mpich_opts = '--enable-static' -# Needed due to changes in GCC10. See https://github.com/pmodels/mpich/issues/4300 -preconfigopts = 'export FCFLAGS="-fallow-argument-mismatch $FCFLAGS" && ' -preconfigopts += 'export FFLAGS="-fallow-argument-mismatch $FFLAGS" && ' -configopts = '--with-pscom-builtin=psm2' - -threaded = False - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb b/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb deleted file mode 100644 index d6edb28fc25d610e3d83b6a38aa441264114b82a..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0-mt.eb +++ /dev/null @@ -1,39 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' -versionsuffix = '-mt' -local_pscom_version = '5.4.7-1_gw' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -source_urls = [ - 'https://github.com/ParaStation/psmpi/archive/', - 'https://github.com/ParaStation/pscom/archive/' -] -sources = [ - '%(version)s.tar.gz', - 'pscom-%s.tar.bz2' % local_pscom_version -] - -builddependencies = [('popt', '1.16', '', True)] - -pscom_allin_path = '%%(builddir)s/pscom-%s ' % local_pscom_version -pgo = True - -patches = [ - 'psmpi_shebang.patch', -] - -mpich_opts = '--enable-static' -configopts = '--with-pscom-builtin=psm2' - -threaded = True - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb b/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb deleted file mode 100644 index 8541698752710569b1bba3305427db9b299da0aa..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/p/psmpi/psmpi-5.4.9-1-intel-compilers-2021.2.0-GCC-10.3.0.eb +++ /dev/null @@ -1,38 +0,0 @@ -name = 'psmpi' -version = '5.4.9-1' -local_pscom_version = '5.4.7-1_gw' - -homepage = 'https://github.com/ParaStation/psmpi2' -description = """ParaStation MPI is an open source high-performance MPI 3.0 implementation, -based on MPICH v3. It provides extra low level communication libraries and integration with -various batch systems for tighter process control. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-compilers', 'version': '2021.2.0-GCC-10.3.0'} - -source_urls = [ - 'https://github.com/ParaStation/psmpi/archive/', - 'https://github.com/ParaStation/pscom/archive/' -] -sources = [ - '%(version)s.tar.gz', - 'pscom-%s.tar.bz2' % local_pscom_version -] - -builddependencies = [('popt', '1.16', '', True)] - -pscom_allin_path = '%%(builddir)s/pscom-%s ' % local_pscom_version -pgo = True - -patches = [ - 'psmpi_shebang.patch', -] - -mpich_opts = '--enable-static' -configopts = '--with-pscom-builtin=psm2' - -threaded = False - -moduleclass = 'mpi' diff --git a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-gpsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-gpsmpi-2020.eb deleted file mode 100644 index 48d43c7a78d8e06160a8ad7beefcabf89e13a01e..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-gpsmpi-2020.eb +++ /dev/null @@ -1,51 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-iimpi-2020.eb b/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-iimpi-2020.eb deleted file mode 100644 index 24ef5ef7a743d8dbbfffd9b7a49b3bc59b143c5d..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-iimpi-2020.eb +++ /dev/null @@ -1,51 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-ipsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-ipsmpi-2020.eb deleted file mode 100644 index e57546be31c7d516f5d2b8180689cc9841110d05..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.5-ipsmpi-2020.eb +++ /dev/null @@ -1,51 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.5' - -homepage = 'http://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. 
The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['http://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] - -builddependencies = [ - ('CubeWriter', '4.5'), -] - -dependencies = [ - ('CubeLib', '4.5'), - ('OTF2', '2.2'), - ('Score-P', '6.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-gpsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-gpsmpi-2020.eb deleted file mode 100644 index c01235b030ec11fb28bb56b7d331ecad96ee2e4d..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-gpsmpi-2020.eb +++ /dev/null @@ -1,58 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -# Building with architectural optimizations on the KNL booster prevents -# using the frontend tools on the frontend, and thus cross-compilation. -# But Score-P/Scalasca does not benefit much from vectorization anyway. 
-toolchainopts = {'optarch': False} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-iimpi-2020.eb b/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-iimpi-2020.eb deleted file mode 100644 index c5734b9a134e1894fd220652927d84b609041268..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-iimpi-2020.eb +++ /dev/null @@ -1,58 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2020'} -# Building with architectural optimizations on the KNL booster prevents -# using the frontend tools on the frontend, and thus cross-compilation. -# But Score-P/Scalasca does not benefit much from vectorization anyway. 
-toolchainopts = {'optarch': False} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-ipsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-ipsmpi-2020.eb deleted file mode 100644 index b788c69bd7d19b9c35c42af6ed4a5eaf38d8f984..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Scalasca/Scalasca-2.6-ipsmpi-2020.eb +++ /dev/null @@ -1,58 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Scalasca' -version = '2.6' - -homepage = 'https://www.scalasca.org/' -description = """ -Scalasca is a software tool that supports the performance optimization of -parallel programs by measuring and analyzing their runtime behavior. The -analysis identifies potential performance bottlenecks -- in particular -those concerning communication and synchronization -- and offers guidance -in exploring their causes. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -# Building with architectural optimizations on the KNL booster prevents -# using the frontend tools on the frontend, and thus cross-compilation. -# But Score-P/Scalasca does not benefit much from vectorization anyway. 
-toolchainopts = {'optarch': False} - -source_urls = ['https://apps.fz-juelich.de/scalasca/releases/scalasca/%(version_major_minor)s/dist'] -sources = [SOURCELOWER_TAR_GZ] -checksums = [ - 'b3f9cb1d58f3e25090a39da777bae8ca2769fd10cbd6dfb9a4887d873ee2441e', # scalasca-2.6.tar.gz -] -builddependencies = [ - ('CubeWriter', '4.6'), -] - -dependencies = [ - ('CubeLib', '4.6'), - ('OTF2', '2.3'), - ('Score-P', '7.0'), -] - -sanity_check_paths = { - 'files': ['bin/scalasca', ('lib/libpearl.replay.a', 'lib64/libpearl.replay.a')], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scalasca/patterns'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-gpsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-gpsmpi-2020.eb deleted file mode 100644 index ac6bd650a9f2211700d542fe2f66fb54385f86b1..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-gpsmpi-2020.eb +++ /dev/null @@ -1,63 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', -] - -builddependencies = [ - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-iimpi-2020.eb b/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-iimpi-2020.eb deleted file mode 100644 index 25fcd4bcd5bfa5200cb2ef650e2ac46c3949283a..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-iimpi-2020.eb +++ /dev/null @@ -1,63 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# 
This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', -] - -builddependencies = [ - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-ipsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-ipsmpi-2020.eb deleted file mode 100644 index 6234f363fb51bf5a30b3790aca575cd5aaeda59e..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-6.0-ipsmpi-2020.eb +++ /dev/null @@ -1,63 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2018 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '6.0' - -homepage = 'http://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} - -source_urls = ['https://www.vi-hps.org/cms/upload/packages/scorep/'] -sources = ['scorep-%(version)s.tar.gz'] -patches = [ - 'Score-P-6.0_binutils_2.34_api_change.patch', -] - -builddependencies = [ - ('CubeLib', '4.5'), - ('CubeWriter', '4.5'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.5'), - ('OTF2', '2.2'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-gpsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-gpsmpi-2020.eb deleted file mode 100644 index 3051bc72961a420a8328be1f59627acc7fee6093..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-gpsmpi-2020.eb +++ /dev/null @@ -1,68 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -# Building with architectural optimizations on the KNL booster prevents -# using the frontend tools on the frontend, and thus cross-compilation. -# But Score-P/Scalasca does not benefit much from vectorization anyway. 
-toolchainopts = {'optarch': False} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-iimpi-2020.eb b/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-iimpi-2020.eb deleted file mode 100644 index cb4d70ebe71d87a5626bd264ea926182f3891bfd..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-iimpi-2020.eb +++ /dev/null @@ -1,68 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'iimpi', 'version': '2020'} -# Building with architectural optimizations on the KNL booster prevents -# using the frontend tools on the frontend, and thus cross-compilation. -# But Score-P/Scalasca does not benefit much from vectorization anyway. 
-toolchainopts = {'optarch': False} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-ipsmpi-2020.eb b/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-ipsmpi-2020.eb deleted file mode 100644 index 581ee7bb07cc9631f8f1482eb7fd852dfc4591e7..0000000000000000000000000000000000000000 --- a/Overlays/jurecabooster_overlay/s/Score-P/Score-P-7.0-ipsmpi-2020.eb +++ /dev/null @@ -1,68 +0,0 @@ -## -# This is an easyconfig file for EasyBuild, see https://github.com/easybuilders/easybuild -# Copyright:: Copyright 2013-2021 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# Markus Geimer <m.geimer@fz-juelich.de> -# Christian Feld <c.feld@fz-juelich.de> -# License:: 3-clause BSD -# -# This work is based on experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## - -easyblock = 'EB_Score_minus_P' - -name = 'Score-P' -version = '7.0' - -homepage = 'https://www.score-p.org' -description = """ -The Score-P measurement infrastructure is a highly scalable and easy-to-use -tool suite for profiling, event tracing, and online analysis of HPC -applications. -""" - -site_contacts = 'Software Analysis and Tools <swat@fz-juelich.de>' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -# Building with architectural optimizations on the KNL booster prevents -# using the frontend tools on the frontend, and thus cross-compilation. -# But Score-P/Scalasca does not benefit much from vectorization anyway. 
-toolchainopts = {'optarch': False} - -source_urls = ['http://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-%(version)s'] -sources = ['scorep-%(version)s.tar.gz'] -checksums = [ - '68f24a68eb6f94eaecf500e17448f566031946deab74f2cba072ee8368af0996', # scorep-7.0.tar.gz -] - -builddependencies = [ - ('CubeLib', '4.6'), - ('CubeWriter', '4.6'), - # Unwinding/sampling support (optional): - ('libunwind', '1.4.0'), -] - -dependencies = [ - # binutils is implicitly available via GCC toolchain - ('OPARI2', '2.0.6'), - ('OTF2', '2.3'), - # Hardware counter support (optional): - ('PAPI', '6.0.0'), - # PDT source-to-source instrumentation support (optional): - ('PDT', '3.25'), -] - -configopts = '--enable-shared ' - -sanity_check_paths = { - 'files': ['bin/scorep', 'include/scorep/SCOREP_User.h', - ('lib/libscorep_adapter_mpi_event.a', 'lib64/libscorep_adapter_mpi_event.a'), - ('lib/libscorep_adapter_mpi_event.%s' % SHLIB_EXT, 'lib64/libscorep_adapter_mpi_event.%s' % SHLIB_EXT)], - 'dirs': [], -} - -# Ensure that local metric documentation is found by CubeGUI -modextrapaths = {'CUBE_DOCPATH': 'share/doc/scorep/profile'} - -moduleclass = 'perf' diff --git a/Overlays/jurecadc_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb b/Overlays/jurecadc_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb deleted file mode 100644 index 6e70c8f5427ba06c25874a8406235be946eb50b6..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'Doxygen' -version = '1.8.18' - -homepage = 'http://www.doxygen.org' -description = """Doxygen is a documentation system for C++, C, Java, Objective-C, Python, - IDL (Corba and Microsoft flavors), Fortran, VHDL, PHP, C#, and to some extent D. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = ['%(namelower)s-%(version)s.src.tar.gz'] -source_urls = ['http://doxygen.nl/files/'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - # flex 2.6.4 provokes a segmentation fault in M4, when compiling Doxygen in CentOS8, so we fall back to 2.6.3 - ('flex', '2.6.3'), - ('Bison', '3.7.6'), -] - -moduleclass = 'devel' diff --git a/Overlays/jurecadc_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb b/Overlays/jurecadc_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb deleted file mode 100644 index c088cee99904ef9cc765b9d50f30006106492d51..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'Doxygen' -version = '1.8.18' - -homepage = 'http://www.doxygen.org' -description = """Doxygen is a documentation system for C++, C, Java, Objective-C, Python, - IDL (Corba and Microsoft flavors), Fortran, VHDL, PHP, C#, and to some extent D. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = ['%(namelower)s-%(version)s.src.tar.gz'] -source_urls = ['http://doxygen.nl/files/'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - # flex 2.6.4 provokes a segmentation fault in M4, when compiling Doxygen in CentOS8, so we fall back to 2.6.3 - ('flex', '2.6.3'), - ('Bison', '3.6.4'), -] - -moduleclass = 'devel' diff --git a/Overlays/jurecadc_overlay/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb b/Overlays/jurecadc_overlay/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb deleted file mode 100644 index 80f1ba62377bc954f4dea278ef460e164e57e66c..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb +++ /dev/null @@ -1,97 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' -versionsuffix = '-gpu' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_gf_lp64 --lmkl_sequential -lmkl_core -lmkl_blacs_intelmpi_lp64 --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'math' diff --git a/Overlays/jurecadc_overlay/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb b/Overlays/jurecadc_overlay/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb deleted file mode 100644 index a4144ed481fc8771c47e3dc832cd3f285b0f4636..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb +++ /dev/null @@ -1,96 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' -versionsuffix = '-gpu' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. 
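Aside: the `usage` text of the ELPA GPU easyconfigs above spells the include and link flags out by hand. As an illustration only (not part of the deleted files), a small sketch that assembles the same flags from the environment variables the module exports; it assumes the ELPA module is loaded, so `ELPA_INCLUDE`, `ELPA_MODULES` and EasyBuild's `EBROOTELPA` are set, and it uses the `-lmkl_gf_lp64` variant quoted for the gpsmkl build (the intel-para variant below uses `-lmkl_intel_lp64` instead):

```python
# Minimal sketch, assuming `module load ELPA` has exported the variables
# defined in modextravars above (plus EBROOTELPA from EasyBuild itself).
import os

inc = os.environ["ELPA_INCLUDE"]
mods = os.environ["ELPA_MODULES"]
root = os.environ["EBROOTELPA"]

# Same flags as in the easyconfig's 'usage' message.
compile_flags = "-I{0} -I{0}/elpa -I{1}".format(inc, mods)
link_flags = ("-L{0}/lib -lelpa "
              "-lmkl_scalapack_lp64 -lmkl_gf_lp64 -lmkl_sequential -lmkl_core "
              "-lmkl_blacs_intelmpi_lp64 -lgomp -lpthread -lm -ldl -lstdc++"
              ).format(root)

print(compile_flags)
print(link_flags)
```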
-""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_intel_lp64 --lmkl_sequential -lmkl_core -lmkl_blacs_intelmpi_lp64 --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'math' diff --git a/Overlays/jurecadc_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb b/Overlays/jurecadc_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb deleted file mode 100644 index edef15b1e9d240917a98d3aa3761e126b43eec31..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because AMD zen processors do not support FMA4 instructions -use_fma4 = False - -# 
can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Overlays/jurecadc_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb b/Overlays/jurecadc_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb deleted file mode 100644 index fd45dceca6a1fddc684a6100db02901943a40010..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because AMD zen processors do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb b/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb deleted file mode 100644 index d1cd2d234aa3003b20e5ec3ca3b519616fc95ad0..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb b/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb deleted file mode 100644 index 5502b43c6c03ece499621671ab45e24840c39c78..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb b/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb deleted file mode 100644 index 5c44a9055c120e2d15382588bbeaa6f0668fe39b..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb b/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb deleted file mode 100644 index 62645ee3f76e5b2e60b8aae2e9569a597f81c430..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jurecadc_overlay/h/HDF5/skip_fortran_fp_kind_test.patch b/Overlays/jurecadc_overlay/h/HDF5/skip_fortran_fp_kind_test.patch deleted file mode 100644 index cc1b1035394e9d9c59914ee1810854ba6a75c4cd..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/h/HDF5/skip_fortran_fp_kind_test.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -Nru hdf5-1.10.6_orig/fortran/test/H5_test_buildiface.F90 hdf5-1.10.6/fortran/test/H5_test_buildiface.F90 ---- hdf5-1.10.6_orig/fortran/test/H5_test_buildiface.F90 2020-10-21 12:16:45.673723000 +0200 -+++ hdf5-1.10.6/fortran/test/H5_test_buildiface.F90 2020-10-21 13:09:22.455045487 +0200 -@@ -215,7 +215,7 @@ - WRITE(11,'(A)') ' INTEGER, OPTIONAL, INTENT( IN ) :: ulp' - WRITE(11,'(A)') ' IF ( PRESENT( ulp ) ) Rel = REAL( ABS(ulp), '//TRIM(ADJUSTL(chr2))//')' - WRITE(11,'(A)') ' Rel = 1.0_'//TRIM(ADJUSTL(chr2)) -- WRITE(11,'(A)') ' real_eq_kind_'//TRIM(ADJUSTL(chr2))//' = ABS( a - b ) < ( Rel * SPACING( MAX(ABS(a),ABS(b)) ) )' -+ WRITE(11,'(A)') ' real_eq_kind_'//TRIM(ADJUSTL(chr2))//' = 0 < 1' - WRITE(11,'(A)') ' END FUNCTION real_eq_kind_'//TRIM(ADJUSTL(chr2)) - ENDDO - diff --git a/Overlays/jurecadc_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/jurecadc_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 14f9dd1ef8e328d779c3f7fe85f19bb9c1909a19..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,64 +0,0 @@ -name = 'iccifort' -version = '2020.2.254' -versionsuffix = '-GCC-9.3.0' - -homepage = 'https://software.intel.com/en-us/intel-compilers/' -description = "Intel C, C++ & Fortran compilers" - -modloadmsg = ''' -We have observed situations where the Intel compiler does not enable vectorization and is overly -conservative when applying optimizations on AMD systems when using -xHost. Other sites report -similar issues also with -xCORE-AVX2. -Our preeliminary results indicate that -march=core-avx2 works as expected and delivers good -performance. - -Please report performance problems on our installed stack with Intel compilers to sc@fz-juelich.de -and keep it in mind when compiling your code with Intel compilers in this system. 
-''' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16744/'] -sources = ['parallel_studio_xe_%(version_major)s_update%(version_minor)s_cluster_edition.tgz'] -patches = ['iccifort-%(version)s_no_mpi_rt_dependency.patch'] -checksums = [ - # parallel_studio_xe_2020_update2_composer_edition.tgz - '4795c44374e8988b91da20ac8f13022d7d773461def4a26ca210a8694f69f133', - # iccifort-2020.2.254_no_mpi_rt_dependency.patch - '73e582d9e108d0680c19c14e9a9c49dbbb06829e39ba8ed87bfd6b4222231196', -] - -local_gccver = '9.3.0' - -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), -] - -# list of regex for components to install -# full list of components can be obtained from pset/mediaconfig.xml in unpacked sources -# cfr. https://software.intel.com/en-us/articles/intel-composer-xe-2015-silent-installation-guide -components = [ - 'intel-comp', 'intel-ccomp', 'intel-fcomp', 'intel-icc', 'intel-ifort', - 'intel-openmp', 'intel-ipsc?_', 'intel-gdb(?!.*mic)' -] - -dontcreateinstalldir = True - -# disable data collection -modextravars = { - 'INTEL_DISABLE_ISIP': '1' -} - -# We have a custom naming scheme that allows us to use a more descriptive module name -modaltsoftname = 'Intel' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = 'family("compiler")' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -moduleclass = 'compiler' diff --git a/Overlays/jurecadc_overlay/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb b/Overlays/jurecadc_overlay/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb deleted file mode 100644 index 35a3b40805f3d2068ff0cc8d4ec8bedb40544b0f..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb +++ /dev/null @@ -1,168 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '24Dec2020' -versionsuffix = '-CUDA' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'patch_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -cuda_compute_capabilities = ['8.0'] - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# Use the bfd linker for C++ (this will only be picked up when using Kokkos) -preconfigopts = 'export CXXFLAGS="-fuse-ld=bfd $CXXFLAGS" &&' -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'chem' diff --git a/Overlays/jurecadc_overlay/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb b/Overlays/jurecadc_overlay/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb deleted file mode 100644 index 7210f37bc48a792d042c384051407ef0e62e7057..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb +++ /dev/null @@ -1,168 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '29Oct2020' -versionsuffix = '-CUDA' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'stable_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -cuda_compute_capabilities = ['8.0'] - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# Use the bfd linker for C++ (this will only be picked up when using Kokkos) -preconfigopts = 'export CXXFLAGS="-fuse-ld=bfd $CXXFLAGS" &&' -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'chem' diff --git a/Overlays/jurecadc_overlay/n/NAG/NAG-Mark27-intel-para-2020.eb b/Overlays/jurecadc_overlay/n/NAG/NAG-Mark27-intel-para-2020.eb deleted file mode 100644 index 34da5890eab080d10cf68180bf56589467f18733..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/n/NAG/NAG-Mark27-intel-para-2020.eb +++ /dev/null @@ -1,58 +0,0 @@ -easyblock = 'Tarball' - -name = 'NAG' -version = 'Mark27' - - -homepage = 'http://www.nag.com/numeric/numerical_libraries.asp' -description = """NAG (Numerical Algorithms Group) Library Mark 27 -(Fortran-, C and AD-Library). NAG Library Mark 27 offers hundreds -of user-callable routines to solve mathematical and statistical problems. - -To enhance the performance, the BLAS and LAPACK routines of the MKL are used. - -The complete documentation is available online with - -https://www.nag.co.uk/numeric/nl/nagdoc_latest -""" - -usage = """ -The library is licensed. The necessary licence is provided through the -environment variable NAG_KUSARI_FILE. By default the resource manager exports -all variables to the compute nodes, if you disable this behaviour you will need -to make the environment variable known to the compute nodes by adding: - - --exports=NAG_KUSARI_FILE - -in the srun statement. - -To see, how to compile and link a program, please call: - - nag_example -help - -Compiling and linking can be done in the following manner: - -ifort -I$EBROOTNAG/nag_interface_blocks driver.f -L$MKLPATH\ - -lnag -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -examples = """To see how to use a special NAG routine please call the script nag_example, e.g.: - - nag_example e04ucf - -An example program and the input data (if necessary) are copied to the current -directory. The example program is compiled, linked and executed. 
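Aside: the NAG `usage` text above relies on the licence being picked up through `NAG_KUSARI_FILE` and warns that the variable may need to be exported explicitly to the compute nodes. A small pre-flight check along those lines, as an illustration only; it assumes the NAG module has been loaded so the variable from `modextravars` is present:

```python
# Minimal sketch: verify the licence variable from modextravars is visible
# before launching a NAG-based job (assumes `module load NAG` was done).
import os
import sys

licence = os.environ.get("NAG_KUSARI_FILE")
if not licence:
    sys.exit("NAG_KUSARI_FILE is not set; load the NAG module first and, if "
             "exports are restricted, add --export=NAG_KUSARI_FILE to srun.")
print("NAG licence file:", licence)
```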
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -sources = [SOURCELOWER_TAR_GZ] - -modextravars = { - 'NAG_KUSARI_FILE': '/p/software/jurecadc/licenses/NAG/nll6i27db.lic', -} - -modluafooter = 'setenv("MKLPATH", pathJoin(os.getenv("MKLROOT"),"lib/intel64"))' - -moduleclass = 'numlib' diff --git a/Overlays/jurecadc_overlay/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb b/Overlays/jurecadc_overlay/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb deleted file mode 100644 index b9dbd75dd8d1cbd62799493f7774344ff89c6b2f..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb +++ /dev/null @@ -1,85 +0,0 @@ -name = 'NVHPC' -version = '20.11' -local_gccver = '9.3.0' -versionsuffix = '-GCC-%s' % local_gccver - -homepage = 'https://developer.nvidia.com/hpc-sdk/' -description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)""" -site_contacts = 'a.herten@fz-juelich.de' - -toolchain = SYSTEM - -# By downloading, you accept the HPC SDK Software License Agreement (https://docs.nvidia.com/hpc-sdk/eula/index.html) -# accept_eula = True -source_urls = ['https://developer.download.nvidia.com/hpc-sdk/%(version)s/'] -local_tarball_tmpl = 'nvhpc_2020_%%(version_major)s%%(version_minor)s_Linux_%s_cuda_multi.tar.gz' -sources = [local_tarball_tmpl % '%(arch)s'] -checksums = [ - { - local_tarball_tmpl % 'x86_64': - 'c80fc26e5ba586696f7030f03054c1aaca0752a891c7923faf47eb23b66857ec', - local_tarball_tmpl % 'ppc64le': - '99e5a5437e82f3914e0fe81feb761a5b599a3fe8b31f3c2cac8ae47e8cdc7b0f' - } -] - -local_gccver = '9.3.0' -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), - ('CUDA', '11.0', '', SYSTEM), - # This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails - ('numactl', '2.0.13', '', SYSTEM) -] - -module_add_cuda = False -cuda_compute_capabilities = "8.0" - -# specify default CUDA version that should be used by NVHPC -# should match one of the CUDA versions that are included with this NVHPC version -# (see install_components/Linux_x86_64/20.7/cuda/) -# for NVHPC 20.7, those are: 11.0, 10.2, 10.1; -# this version can be tweaked from the EasyBuild command line with -# --try-amend=default_cuda_version="10.2" (for example) -default_cuda_version = '11.0' - -# NVHPC EasyBlock supports some features, which can be set via CLI or this easyconfig. -# The following list gives examples for the easyconfig -# -# NVHPC needs CUDA to work. Two options are available: 1) Use NVHPC-bundled CUDA, 2) use system CUDA -# 1) Bundled CUDA -# If no easybuild dependency to CUDA is present, the bundled CUDA is taken. A version needs to be specified with -# default_cuda_version = "11.0" -# in this easyconfig file; alternatively, it can be specified through the command line during installation with -# --try-amend=default_cuda_version="10.2" -# 2) CUDA provided via EasyBuild -# Use CUDAcore as a dependency, for example -# dependencies = [('CUDAcore', '11.0.2')] -# The parameter default_cuda_version still can be set as above. -# If not set, it will be deduced from the CUDA module (via $EBVERSIONCUDA) -# -# Define a NVHPC-default Compute Capability -# cuda_compute_capabilities = "8.0" -# Can also be specified on the EasyBuild command line via --cuda-compute-capabilities=8.0 -# Only single values supported, not lists of values! 
-# -# Options to add/remove things to/from environment module (defaults shown) -# module_byo_compilers = False # Remove compilers from PATH (Bring-your-own compilers) -# module_nvhpc_own_mpi = False # Add NVHPC's own pre-compiled OpenMPI -# module_add_math_libs = False # Add NVHPC's math libraries (which should be there from CUDA anyway) -# module_add_profilers = False # Add NVHPC's NVIDIA Profilers -# module_add_nccl = False # Add NVHPC's NCCL library -# module_add_nvshmem = False # Add NVHPC's NVSHMEM library -# module_add_cuda = False # Add NVHPC's bundled CUDA - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = ''' -family("compiler") -add_property("arch","gpu") -''' - -# Always do a recursive unload on compilers -recursive_module_unload = True diff --git a/Overlays/jurecadc_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb b/Overlays/jurecadc_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb deleted file mode 100644 index 030ece8ba92f6d9c2db0bd9b7ca5aa461175d250..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,59 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2020.2.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.18-28964561.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['604769a55a72adce8f1513fcacb36d7cf5b5e3cc99b65d6a20e4d5e987344cb0'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround 1) (find) due to wrong permissions once the files are extracted from the .run file -# Workaround 2) (mv) due to CentOS 8 coming with a newer libk5crypto.so which doesn't have the symbols -# anymore needed by Nsight Compute's own libcrypto.so. Removing / renaming the shipped -# libcrypto.so makes Nsight Compute pick up the system libcrypto.so and everything is -# grand again. Bug has been filed by Andreas, 13.10.2020 - -local_libcrypto_path = '%(installdir)s/host/linux-desktop-glibc_2_11_3-x64/libcrypto.so' -postinstallcmds = [ - 'find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;', - 'mv %s %s.bak' % (local_libcrypto_path, local_libcrypto_path), - 'mv %s.1.1 %s.1.1.bak' % (local_libcrypto_path, local_libcrypto_path) -] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Overlays/jurecadc_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb b/Overlays/jurecadc_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb deleted file mode 100644 index a1543be921a2c8fa2b2c559b6f5314f69d884245..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb +++ /dev/null @@ -1,58 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default OpenMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'UCX_MAX_RNDV_RAILS': '1', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # OMPIO does not seem to work reliably on our system - 'OMPI_MCA_io': 'romio321', -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb b/Overlays/jurecadc_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb deleted file mode 100644 index e276b02b5ce269373fd0b9443d0fb71b94cd98a5..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA' - -homepage = '' -description = """This is a module to load the default OpenMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 
'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'UCX_MAX_RNDV_RAILS': '1', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # OMPIO does not seem to work reliably on our system - 'OMPI_MCA_io': 'romio321', -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb b/Overlays/jurecadc_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb deleted file mode 100644 index ab139ab9b6585b6378b990ab154463b778a96764..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.9.0', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecadc_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb 
b/Overlays/jurecadc_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 155e0c334d0fa886a3d2eaae3ba013e91cdf38d4..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.9.0', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb b/Overlays/jurecadc_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb deleted file mode 100644 index f535c63f0d4ecea43a7f82cb2198a69c0a22b1de..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb +++ /dev/null @@ -1,94 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyTorch-Geometric' -version = '1.6.3' -local_pytorch_ver = '1.7.0' -versionsuffix = '-Python-%%(pyver)s-PyTorch-%s' % local_pytorch_ver - -homepage = 'https://github.com/rusty1s/pytorch_geometric' -description = "PyTorch Geometric (PyG) is a geometric deep learning extension library for PyTorch." 
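The versionsuffix a few lines above is built in two passes: the trailing '%s' is filled in immediately with local_pytorch_ver by ordinary Python %-formatting, while the escaped '%%(pyver)s' survives as '%(pyver)s' and is only resolved later by EasyBuild's template engine from the Python dependency. A minimal sketch of both stages, assuming PyTorch 1.7.0 and Python 3.8.5 as in the dependency list below (illustrative only, not part of the deleted easyconfig; the dict substitution merely mimics EasyBuild's template step):

    local_pytorch_ver = '1.7.0'

    # Stage 1: plain %-formatting at easyconfig parse time; '%%' collapses to a
    # literal '%', so the EasyBuild template marker is preserved.
    versionsuffix = '-Python-%%(pyver)s-PyTorch-%s' % local_pytorch_ver
    assert versionsuffix == '-Python-%(pyver)s-PyTorch-1.7.0'

    # Stage 2: EasyBuild later fills in %(pyver)s from the Python dependency
    # (3.8.5 here); a plain dict substitution stands in for that step.
    assert versionsuffix % {'pyver': '3.8.5'} == '-Python-3.8.5-PyTorch-1.7.0'

The same escaping convention applies wherever a literal template such as %(installdir)s or %(version)s has to pass through an explicit %-format in these easyconfigs.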
- -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -local_pysuff = '-Python-%(pyver)s' -dependencies = [ - ('Python', '3.8.5'), - ('PyTorch', local_pytorch_ver, local_pysuff), - ('numba', '0.51.1', local_pysuff), - ('h5py', '2.10.0', '-serial%s' % local_pysuff), - ('scikit', '2020', local_pysuff), - ('torchvision', '0.8.2', local_pysuff), - ('trimesh', '3.8.11', local_pysuff), - ('METIS', '5.1.0', '-IDX64'), -] - -use_pip = True - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_download_dep_fail = True -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('gdist', '1.0.3', { - 'source_urls': ['https://pypi.python.org/packages/source/g/gdist'], - 'modulename': 'gdist', - }), - ('googledrivedownloader', '0.4', { - 'checksums': ['4b34c1337b2ff3bf2bd7581818efbdcaea7d50ffd484ccf80809688f5ca0e204'], - 'modulename': 'google_drive_downloader', - }), - ('plyfile', '0.7.2', { - 'checksums': ['59a25845d00a51098e6c9147c3c96ce89ad97395e256a4fabb4aed7cf7db5541'], - }), - ('torch_scatter', '2.0.5', { - 'patches': ['torch_scatter-2.0.5-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '2.0.5.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_scatter/archive/'], - }), - ('torch_sparse', '0.6.8', { - 'patches': ['torch_sparse-0.6.8-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'preinstallopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'source_tmpl': '0.6.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_sparse/archive/'], - }), - ('torch_cluster', '1.5.8', { - 'patches': ['torch_cluster-1.5.8-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.5.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_cluster/archive/'], - }), - ('torch_spline_conv', '1.2.0', { - 'patches': ['torch_spline_conv-1.2.0-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.2.0.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_spline_conv/archive'], - }), - ('ase', '3.21.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/ase'], - 'modulename': 'ase', - }), - ('python-louvain', '0.15', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-louvain'], - 'checksums': ['2a856edfbe29952a60a5538a84bb78cca18f6884a88b9325e85a11c8dd4917eb'], - 'modulename': 'community', - }), - ('tqdm', '4.56.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tqdm'], - 'modulename': 'tqdm', - }), - ('torch_geometric', version, { - 'checksums': ['347f693bebcc8a621eda4867dafab91c04db5f596d7ed7ecb89b242f8ab5c6a1'], - }), -] - -sanity_pip_check = True - -moduleclass = 'devel' diff --git a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_80.patch b/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_80.patch deleted file mode 100644 index c80da06e0469c2c613424a4df95950c022c60895..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:10:11.609352000 +0100 -+++ setup.py 2021-01-20 10:10:37.525550350 +0100 -@@ -39,7 +39,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', 
'--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_80.patch b/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_80.patch deleted file mode 100644 index ec5521c3024f876be8c5f6999256757e7d0631ec..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 09:53:22.085271000 +0100 -+++ setup.py 2021-01-20 09:53:54.835241801 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_80.patch b/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_80.patch deleted file mode 100644 index 5439544a81cb588a07218faeb89272c07d9b2595..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:07:15.518446000 +0100 -+++ setup.py 2021-01-20 10:07:51.389877000 +0100 -@@ -53,7 +53,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - if sys.platform == 'win32': diff --git a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_80.patch b/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_80.patch deleted file mode 100644 index a3ae24b363e5bce2e2937c14301af194bd14cc14..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:12:33.326687000 +0100 -+++ setup.py 2021-01-20 10:12:51.492198482 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/jurecadc_overlay/p/pscom/pscom-5.4-default.eb b/Overlays/jurecadc_overlay/p/pscom/pscom-5.4-default.eb deleted file mode 100644 index 375c9c2d3dcfc25b617bb06fb4a9f4d42ca5e906..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/pscom/pscom-5.4-default.eb +++ /dev/null @@ -1,49 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'pscom' -# Create drop-in replacement version that ensures over-riding behaviour -version = "5.4-default" -local_realversion = "5.4.6-1_gw" -homepage = 'http://www.par-tec.com' 
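The four *-sm_80.patch files above all make the same one-line change: they replace the hard-coded '-arch=sm_35' in each extension's setup.py with '-gencode=arch=compute_80,code=sm_80', so the CUDA kernels are built for the A100-class GPUs of JURECA-DC (the jusuf overlay further down carries analogous sm_70 patches for its V100-class nodes). A hedged sketch of the same idea with the compute capability taken from an environment variable rather than hard-coded; the variable name LOCAL_CUDA_CC is an assumption for illustration, not something the patches or EasyBuild define:

    import os

    # Hypothetical knob: compute capability as 'XY' (80 ~ A100, 70 ~ V100); default 80.
    local_cc = os.getenv('LOCAL_CUDA_CC', '80')

    # Same pattern as the patched setup.py files: start from NVCC_FLAGS, then append
    # the architecture flags.
    nvcc_flags = os.getenv('NVCC_FLAGS', '')
    nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ')
    nvcc_flags += ['-gencode=arch=compute_%s,code=sm_%s' % (local_cc, local_cc),
                   '--expt-relaxed-constexpr']
    print(nvcc_flags)

Keeping one patch per target architecture, as these overlays do, avoids depending on build-time environment state and is the more reproducible choice for a site-wide stack.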
-description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -sources = ['%%(name)s-%s.tar.bz2' % local_realversion] - -builddependencies = [ - # Fails with binutils 2.34 - ('binutils', '2.32'), - ('popt', '1.16'), - ('CUDA', '11.0'), -] - -dependencies = [ - ('UCX', '1.9.0'), -] - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '--enable-cuda --enable-ucp' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ('lib/libpscom4gateway.so', 'lib64/libpscom4gateway.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % local_realversion, -} - -moduleclass = 'tools' diff --git a/Overlays/jurecadc_overlay/p/pscom/pscom-5.4.6-1.eb b/Overlays/jurecadc_overlay/p/pscom/pscom-5.4.6-1.eb deleted file mode 100644 index db3e6a6b9c6ae94ec1c2932c37d1fc9346b23e96..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/pscom/pscom-5.4.6-1.eb +++ /dev/null @@ -1,47 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'pscom' -version = "5.4.6-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -sources = ['%%(name)s-%s_gw.tar.bz2' % version] - -builddependencies = [ - # Fails with binutils 2.34 - ('binutils', '2.32'), - ('popt', '1.16'), - ('CUDA', '11.0'), -] - -dependencies = [ - ('UCX', '1.8.1'), -] - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '--enable-cuda --enable-ucp' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ('lib/libpscom4gateway.so', 'lib64/libpscom4gateway.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % version, -} - -moduleclass = 'tools' diff --git a/Overlays/jurecadc_overlay/p/pscom/pscom-5.4.7-1.eb b/Overlays/jurecadc_overlay/p/pscom/pscom-5.4.7-1.eb deleted file mode 100644 index 438dcc3ca66da9503b122867d800b7faf64469e3..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/pscom/pscom-5.4.7-1.eb +++ /dev/null @@ -1,48 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'pscom' -version = "5.4.7-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -sources = ['%%(name)s-%s_gw.tar.bz2' % version] - -builddependencies = [ - ('popt', '1.16'), - ('CUDA', '11.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('UCX', '1.9.0'), -] - -build_type = 'RelWithDebInfo' - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '-DCUDA_ENABLED=ON' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ('lib/libpscom4gateway.so', 'lib64/libpscom4gateway.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % version, -} - -moduleclass = 'tools' diff --git a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb b/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb deleted file mode 100644 index 4c0c2ddc4ebb07193099d57e3daf2e0f56a49a93..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default ParaStationMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb b/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb deleted file mode 100644 index 77094b63703cd8f1d4b5d490ae82cc99f5ab3d9e..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb b/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb deleted file mode 100644 index af53fdc9cd8e1bb789a315e13a27e0581e0fa18f..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default ParaStationMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb b/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb deleted file mode 100644 index c5b260f0bd9608cebaea54a357eccb38622b8b28..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/u/UCX/UCX-1.8.1.eb b/Overlays/jurecadc_overlay/u/UCX/UCX-1.8.1.eb deleted file mode 100644 index 740e9d2e3d5f84bb1f22280bac5c90f32d8f4e33..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/u/UCX/UCX-1.8.1.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.8.1' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -# configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # 
Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support -configopts += '--without-cm ' # Disable IB CM - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/u/UCX/UCX-1.9.0.eb b/Overlays/jurecadc_overlay/u/UCX/UCX-1.9.0.eb deleted file mode 100644 index be5e7d345e87b84378bcbf8af348c6888b20c666..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/u/UCX/UCX-1.9.0.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.9.0' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support -configopts += '--without-cm ' # Disable IB CM - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/jurecadc_overlay/v/Vampir/Vampir-9.9.0.eb b/Overlays/jurecadc_overlay/v/Vampir/Vampir-9.9.0.eb deleted file mode 100644 index bd7449cc1fe09bb86fd46f2d4a6c7f77bdd1d167..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/v/Vampir/Vampir-9.9.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project 
-# http://apps.fz-juelich.de/unite/ -## -easyblock = 'Binary' - -name = "Vampir" -version = "9.9.0" -local_archsuffix = "-linux-x86_64" - -homepage = 'http://www.vampir.eu' -description = """The VAMPIR software tool provides an easy-to-use framework that enables -developers to quickly display and analyze arbitrary program behavior at any level of detail. -The tool suite implements optimized event analysis algorithms and customizable displays that -enable fast and interactive rendering of very complex performance monitoring data. - -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['vampir-%s%s-setup.sh' % (version, local_archsuffix)] - -install_cmd = './vampir-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s' - -sanity_check_paths = { - 'files': ["bin/vampir", "doc/vampir-manual.pdf"], - 'dirs': [] -} - -modextravars = { - 'VAMPIR_LICENSE': '/p/software/jurecadc/licenses/vampir/vampir.license', -} - -moduleclass = 'tools' diff --git a/Overlays/jurecadc_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb b/Overlays/jurecadc_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb deleted file mode 100644 index fb750f42d8eaa5a0f75070b125e0a4ee43d1f821..0000000000000000000000000000000000000000 --- a/Overlays/jurecadc_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'Binary' - -name = "VampirServer" -version = "9.9.0" - -homepage = 'http://www.vampir.eu' -description = """The VAMPIR software tool provides an easy-to-use framework that enables -developers to quickly display and analyze arbitrary program behavior at any level of detail. -The tool suite implements optimized event analysis algorithms and customizable displays that -enable fast and interactive rendering of very complex performance monitoring data. 
-""" - -usage = """ -To start VampirServer -module load Vampir VampirServer -vampir & -BATCH_OPT="--account=<budget> --partition=<partition>" vampirserver start -n 4 mpi -(note server + port + server_id) -- Use it -Vampir GUI-> open other -> remote file -> server + port -- To stop VampirServer -vampirserver stop <server_id> -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -toolchainopts = {"usempi": True} - -sources = ['vampirserver-%s-linux-x86_64-setup.sh' % (version)] - -install_cmd = ('./vampirserver-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s ' - '&& %(installdir)s/bin/vampirserver config --silent') - -sanity_check_paths = { - 'files': ["bin/vampirserver", "doc/vampirserver-manual.pdf"], - 'dirs': [] -} - -# Remove Cray-specific 'ap' launcher, -# use SLURM launcher as MPI launcher and default -postinstallcmds = [ - 'rm %(installdir)s/etc/server/launcher/ap', - '''sed -i s/'BATCH_OPT=""'/'#BATCH_OPT=""'/g %(installdir)s/etc/server/launcher/custom/slurm''', - 'cp %(installdir)s/etc/server/launcher/custom/slurm %(installdir)s/etc/server/launcher/mpi', -] - -modextravars = { - 'VAMPIR_LICENSE': '/p/software/jurecadc/licenses/vampir/vampir.license', -} - -moduleclass = 'perf' diff --git a/Overlays/jusuf_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb b/Overlays/jusuf_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb deleted file mode 100644 index edef15b1e9d240917a98d3aa3761e126b43eec31..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because AMD zen processors do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Overlays/jusuf_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb b/Overlays/jusuf_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb deleted file mode 100644 index fd45dceca6a1fddc684a6100db02901943a40010..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# 
no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because AMD zen processors do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Overlays/jusuf_overlay/h/HDF/HDF-4.2.15-GCC-9.3.0.eb b/Overlays/jusuf_overlay/h/HDF/HDF-4.2.15-GCC-9.3.0.eb deleted file mode 100644 index 6178b0c12adac5ef08a67a76dd1afc8cb3e585d7..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/h/HDF/HDF-4.2.15-GCC-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HDF' -version = '4.2.15' - -homepage = 'http://www.hdfgroup.org/products/hdf4/' -description = """HDF (also known as HDF4) is a library and multi-object file format for storing - and managing data between machines. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('Java', '15', '', SYSTEM), -] - -dependencies = [ - ('Szip', '2.1.1'), - ('zlib', '1.2.11'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), -] - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.split('-')[0]] - -configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java ' -configopts += '--includedir=%(installdir)s/include/%(namelower)s ' - -sanity_check_paths = { - 'files': ['lib/libdf.a', 'lib/libhdf4.settings', 'lib/libmfhdf.a'], - 'dirs': ['bin', 'include/hdf'], -} - -moduleclass = 'data' diff --git a/Overlays/jusuf_overlay/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/jusuf_overlay/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index b3d0a06f6ac30a4a58270b00cf6ea4150dc7eec3..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/h/HDF/HDF-4.2.15-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'HDF' -version = '4.2.15' - -homepage = 'http://www.hdfgroup.org/products/hdf4/' -description = """HDF (also known as HDF4) is a library and multi-object file format for storing - and managing data between machines. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} -toolchainopts = {'opt': True, 'pic': True} - -builddependencies = [ - ('flex', '2.6.4'), - ('Bison', '3.6.4'), - ('Java', '15', '', SYSTEM), -] - -dependencies = [ - ('Szip', '2.1.1'), - ('zlib', '1.2.11'), - ('JasPer', '2.0.19'), - ('libjpeg-turbo', '2.0.5'), -] - -sources = [SOURCELOWER_TAR_GZ] -source_urls = ['http://www.hdfgroup.org/ftp/HDF/releases/HDF%s/src/' % version.split('-')[0]] - -configopts = '--with-szlib=$EBROOTSZIP --with-zlib=$EBROOTZLIB --enable-java ' -configopts += '--includedir=%(installdir)s/include/%(namelower)s ' - -sanity_check_paths = { - 'files': ['lib/libdf.a', 'lib/libhdf4.settings', 'lib/libmfhdf.a'], - 'dirs': ['bin', 'include/hdf'], -} - -moduleclass = 'data' diff --git a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb b/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb deleted file mode 100644 index d1cd2d234aa3003b20e5ec3ca3b519616fc95ad0..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb b/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb deleted file mode 100644 index 5502b43c6c03ece499621671ab45e24840c39c78..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb b/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb deleted file mode 100644 index 5c44a9055c120e2d15382588bbeaa6f0668fe39b..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb b/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb deleted file mode 100644 index 62645ee3f76e5b2e60b8aae2e9569a597f81c430..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/jusuf_overlay/h/HDF5/skip_fortran_fp_kind_test.patch b/Overlays/jusuf_overlay/h/HDF5/skip_fortran_fp_kind_test.patch deleted file mode 100644 index cc1b1035394e9d9c59914ee1810854ba6a75c4cd..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/h/HDF5/skip_fortran_fp_kind_test.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -Nru hdf5-1.10.6_orig/fortran/test/H5_test_buildiface.F90 hdf5-1.10.6/fortran/test/H5_test_buildiface.F90 ---- hdf5-1.10.6_orig/fortran/test/H5_test_buildiface.F90 2020-10-21 12:16:45.673723000 +0200 -+++ hdf5-1.10.6/fortran/test/H5_test_buildiface.F90 2020-10-21 13:09:22.455045487 +0200 -@@ -215,7 +215,7 @@ - WRITE(11,'(A)') ' INTEGER, OPTIONAL, INTENT( IN ) :: ulp' - WRITE(11,'(A)') ' IF ( PRESENT( ulp ) ) Rel = REAL( ABS(ulp), '//TRIM(ADJUSTL(chr2))//')' - WRITE(11,'(A)') ' Rel = 1.0_'//TRIM(ADJUSTL(chr2)) -- WRITE(11,'(A)') ' real_eq_kind_'//TRIM(ADJUSTL(chr2))//' = ABS( a - b ) < ( Rel * SPACING( MAX(ABS(a),ABS(b)) ) )' -+ WRITE(11,'(A)') ' real_eq_kind_'//TRIM(ADJUSTL(chr2))//' = 0 < 1' - WRITE(11,'(A)') ' END FUNCTION real_eq_kind_'//TRIM(ADJUSTL(chr2)) - ENDDO - diff --git a/Overlays/jusuf_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/jusuf_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 14f9dd1ef8e328d779c3f7fe85f19bb9c1909a19..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,64 +0,0 @@ -name = 'iccifort' -version = '2020.2.254' -versionsuffix = '-GCC-9.3.0' - -homepage = 'https://software.intel.com/en-us/intel-compilers/' -description = "Intel C, C++ & Fortran compilers" - -modloadmsg = ''' -We have observed situations where the Intel compiler does not enable vectorization and is overly -conservative when applying optimizations on AMD systems when using -xHost. Other sites report -similar issues also with -xCORE-AVX2. -Our preeliminary results indicate that -march=core-avx2 works as expected and delivers good -performance. - -Please report performance problems on our installed stack with Intel compilers to sc@fz-juelich.de -and keep it in mind when compiling your code with Intel compilers in this system. 
-''' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16744/'] -sources = ['parallel_studio_xe_%(version_major)s_update%(version_minor)s_cluster_edition.tgz'] -patches = ['iccifort-%(version)s_no_mpi_rt_dependency.patch'] -checksums = [ - # parallel_studio_xe_2020_update2_composer_edition.tgz - '4795c44374e8988b91da20ac8f13022d7d773461def4a26ca210a8694f69f133', - # iccifort-2020.2.254_no_mpi_rt_dependency.patch - '73e582d9e108d0680c19c14e9a9c49dbbb06829e39ba8ed87bfd6b4222231196', -] - -local_gccver = '9.3.0' - -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), -] - -# list of regex for components to install -# full list of components can be obtained from pset/mediaconfig.xml in unpacked sources -# cfr. https://software.intel.com/en-us/articles/intel-composer-xe-2015-silent-installation-guide -components = [ - 'intel-comp', 'intel-ccomp', 'intel-fcomp', 'intel-icc', 'intel-ifort', - 'intel-openmp', 'intel-ipsc?_', 'intel-gdb(?!.*mic)' -] - -dontcreateinstalldir = True - -# disable data collection -modextravars = { - 'INTEL_DISABLE_ISIP': '1' -} - -# We have a custom naming scheme that allows us to use a more descriptive module name -modaltsoftname = 'Intel' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = 'family("compiler")' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -moduleclass = 'compiler' diff --git a/Overlays/jusuf_overlay/m/M4/M4-1.4.18-GCCcore-9.3.0.eb b/Overlays/jusuf_overlay/m/M4/M4-1.4.18-GCCcore-9.3.0.eb deleted file mode 100644 index 32af8ba7451cb5c0b4a7fbbd3d7a6898e064d088..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/m/M4/M4-1.4.18-GCCcore-9.3.0.eb +++ /dev/null @@ -1,29 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'M4' -version = '1.4.18' - -homepage = 'http://www.gnu.org/software/m4/m4.html' -description = """GNU M4 is an implementation of the traditional Unix macro processor. It is mostly SVR4 compatible - although it has some extensions (for example, handling more than 9 positional parameters to macros). - GNU M4 also has built-in functions for including files, running shell commands, doing arithmetic, etc. -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -# use same binutils version that was used when building GCC toolchain -builddependencies = [('binutils', '2.34', '', SYSTEM)] - -configopts = "--enable-cxx" - -sanity_check_paths = { - 'files': ["bin/m4"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Overlays/jusuf_overlay/m/M4/M4-1.4.18.eb b/Overlays/jusuf_overlay/m/M4/M4-1.4.18.eb deleted file mode 100644 index d686b2eb04b511fde3cecd0ea74d3d3cb012f7d5..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/m/M4/M4-1.4.18.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'M4' -version = '1.4.18' - -homepage = 'http://www.gnu.org/software/m4/m4.html' -description = """GNU M4 is an implementation of the traditional Unix macro processor. - It is mostly SVR4 compatible although it has some extensions - (for example, handling more than 9 positional parameters to macros). - GNU M4 also has built-in functions for including files, running shell commands, doing arithmetic, etc. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = [SOURCELOWER_TAR_GZ] -source_urls = [GNU_SOURCE] - -configopts = "--enable-cxx" - -sanity_check_paths = { - 'files': ["bin/m4"], - 'dirs': [], -} - -moduleclass = 'devel' diff --git a/Overlays/jusuf_overlay/n/NAG/NAG-Mark27-intel-para-2020.eb b/Overlays/jusuf_overlay/n/NAG/NAG-Mark27-intel-para-2020.eb deleted file mode 100644 index 9e3009c95c63a919d35fe1dad6dccb89f2f86368..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/n/NAG/NAG-Mark27-intel-para-2020.eb +++ /dev/null @@ -1,58 +0,0 @@ -easyblock = 'Tarball' - -name = 'NAG' -version = 'Mark27' - - -homepage = 'http://www.nag.com/numeric/numerical_libraries.asp' -description = """NAG (Numerical Algorithms Group) Library Mark 27 -(Fortran-, C and AD-Library). NAG Library Mark 27 offers hundreds -of user-callable routines to solve mathematical and statistical problems. - -To enhance the performance, the BLAS and LAPACK routines of the MKL are used. - -The complete documentation is available online with - -https://www.nag.co.uk/numeric/nl/nagdoc_latest -""" - -usage = """ -The library is licensed. The necessary licence is provided through the -environment variable NAG_KUSARI_FILE. By default the resource manager exports -all variables to the compute nodes, if you disable this behaviour you will need -to make the environment variable known to the compute nodes by adding: - - --exports=NAG_KUSARI_FILE - -in the srun statement. - -To see, how to compile and link a program, please call: - - nag_example -help - -Compiling and linking can be done in the following manner: - -ifort -I$EBROOTNAG/nag_interface_blocks driver.f -L$MKLPATH\ - -lnag -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -""" - -examples = """To see how to use a special NAG routine please call the script nag_example, e.g.: - - nag_example e04ucf - -An example program and the input data (if necessary) are copied to the current -directory. The example program is compiled, linked and executed. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'intel-para', 'version': '2020'} -sources = [SOURCELOWER_TAR_GZ] - -modextravars = { - 'NAG_KUSARI_FILE': '/p/software/jusuf/licenses/NAG/nll6i27db.lic', -} - -modluafooter = 'setenv("MKLPATH", pathJoin(os.getenv("MKLROOT"),"lib/intel64"))' - -moduleclass = 'numlib' diff --git a/Overlays/jusuf_overlay/n/nvidia-driver/nvidia-driver-default.eb b/Overlays/jusuf_overlay/n/nvidia-driver/nvidia-driver-default.eb deleted file mode 100644 index 4a128c8b7f26a3d488c545e4743edf1173088788..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/n/nvidia-driver/nvidia-driver-default.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'nvidia-driver' -version = 'default' -realversion = '470.57.02' - -homepage = 'https://developer.nvidia.com/cuda-toolkit' -description = """This is a set of libraries normally installed by the NVIDIA driver installer.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['http://us.download.nvidia.com/tesla/%s/' % realversion] -sources = ['NVIDIA-Linux-x86_64-%s.run' % realversion] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb b/Overlays/jusuf_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb deleted file mode 100644 index 2b78656cdfa98d297717777580c97b586621933b..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb +++ /dev/null @@ -1,57 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default OpenMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # So mlx5_0 is not taken, given that one is an ethernet port in jusuf - 'OMPI_MCA_btl_openib_if_include': 'mlx5_1', -} - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb b/Overlays/jusuf_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb deleted file mode 100644 index 69b8dfdddd0b008fbdcee4c7eee54b96e57368a1..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb +++ /dev/null @@ -1,44 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default OpenMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # So mlx5_0 is not taken, given that one is an ethernet port in jusuf - 'OMPI_MCA_btl_openib_if_include': 'mlx5_1', -} - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb b/Overlays/jusuf_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb deleted file mode 100644 index ab139ab9b6585b6378b990ab154463b778a96764..0000000000000000000000000000000000000000 --- 
a/Overlays/jusuf_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.9.0', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jusuf_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/jusuf_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 155e0c334d0fa886a3d2eaae3ba013e91cdf38d4..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.9.0', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts 
+= '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/jusuf_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb b/Overlays/jusuf_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb deleted file mode 100644 index 374e4e52bb3e11daecccf8504d074bc23f673c41..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb +++ /dev/null @@ -1,98 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyTorch-Geometric' -version = '1.6.3' -local_pytorch_ver = '1.7.0' -versionsuffix = '-Python-%%(pyver)s-PyTorch-%s' % local_pytorch_ver - -homepage = 'https://github.com/rusty1s/pytorch_geometric' -description = "PyTorch Geometric (PyG) is a geometric deep learning extension library for PyTorch." - -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -local_pysuff = '-Python-%(pyver)s' -dependencies = [ - ('Python', '3.8.5'), - ('PyTorch', local_pytorch_ver, local_pysuff), - ('numba', '0.51.1', local_pysuff), - ('h5py', '2.10.0', '-serial%s' % local_pysuff), - ('scikit', '2020', local_pysuff), - ('torchvision', '0.8.2', local_pysuff), - ('trimesh', '3.8.11', local_pysuff), - ('METIS', '5.1.0', '-IDX64'), -] - -use_pip = True - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_download_dep_fail = True -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('gdist', '1.0.3', { - 'source_urls': ['https://pypi.python.org/packages/source/g/gdist'], - 'modulename': 'gdist', - }), - ('rdflib', '5.0.0', { - 'checksums': ['78149dd49d385efec3b3adfbd61c87afaf1281c30d3fcaf1b323b34f603fb155'], - 'modulename': 'rdflib', - }), - ('googledrivedownloader', '0.4', { - 'checksums': ['4b34c1337b2ff3bf2bd7581818efbdcaea7d50ffd484ccf80809688f5ca0e204'], - 'modulename': 'google_drive_downloader', - }), - ('plyfile', '0.7.2', { - 'checksums': ['59a25845d00a51098e6c9147c3c96ce89ad97395e256a4fabb4aed7cf7db5541'], - }), - ('torch_scatter', '2.0.5', { - 'patches': ['torch_scatter-2.0.5-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '2.0.5.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_scatter/archive/'], - }), - ('torch_sparse', '0.6.8', { - 'patches': ['torch_sparse-0.6.8-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'preinstallopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'source_tmpl': '0.6.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_sparse/archive/'], - }), - ('torch_cluster', 
'1.5.8', { - 'patches': ['torch_cluster-1.5.8-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.5.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_cluster/archive/'], - }), - ('torch_spline_conv', '1.2.0', { - 'patches': ['torch_spline_conv-1.2.0-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.2.0.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_spline_conv/archive'], - }), - ('ase', '3.21.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/ase'], - 'modulename': 'ase', - }), - ('python-louvain', '0.15', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-louvain'], - 'checksums': ['2a856edfbe29952a60a5538a84bb78cca18f6884a88b9325e85a11c8dd4917eb'], - 'modulename': 'community', - }), - ('tqdm', '4.56.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tqdm'], - 'modulename': 'tqdm', - }), - ('torch_geometric', version, { - 'checksums': ['347f693bebcc8a621eda4867dafab91c04db5f596d7ed7ecb89b242f8ab5c6a1'], - }), -] - -sanity_pip_check = True - -moduleclass = 'devel' diff --git a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch b/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch deleted file mode 100644 index 25bbf0ec24a03461821a0b38e4ef043efcb052d0..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:10:11.609352000 +0100 -+++ setup.py 2021-01-20 10:10:37.525550350 +0100 -@@ -39,7 +39,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch b/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch deleted file mode 100644 index 94ae43ed4ef1a4d0788e0d08a0564b8978868506..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 09:53:22.085271000 +0100 -+++ setup.py 2021-01-20 09:53:54.835241801 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch b/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch deleted file mode 100644 index 1a4f2d0fbc9e83c52a46f82dd5abfc6bdf00af82..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:07:15.518446000 +0100 -+++ setup.py 2021-01-20 10:07:51.389877000 +0100 -@@ -53,7 +53,7 @@ - define_macros += 
[('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - if sys.platform == 'win32': diff --git a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch b/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch deleted file mode 100644 index e0a3e0ccf186aef0a4c22892b79ae0db810f351c..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:12:33.326687000 +0100 -+++ setup.py 2021-01-20 10:12:51.492198482 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/jusuf_overlay/p/pscom/pscom-5.4-default.eb b/Overlays/jusuf_overlay/p/pscom/pscom-5.4-default.eb deleted file mode 100644 index ad61598bad5a72d05eccb8a10061981b8bf5287f..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/pscom/pscom-5.4-default.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'pscom' -# Create drop-in replacement version that ensures over-riding behaviour -version = "5.4-default" -local_realversion = "5.4.7-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. 
-""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ParaStation/%(name)s/archive/'] -sources = ['%s.tar.gz' % local_realversion] - -builddependencies = [ - ('popt', '1.16'), - ('CUDA', '11.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('UCX', '1.9.0'), -] - -build_type = 'RelWithDebInfo' - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '-DCUDA_ENABLED=ON' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % local_realversion, -} - -moduleclass = 'tools' diff --git a/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb b/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb deleted file mode 100644 index b35b112ac7c1b6f265a71b3e54acc6a57febcd77..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default ParaStationMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'UCX_NET_DEVICES': 'mlx5_1:1', -} - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb b/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb deleted file mode 100644 index da1f9a94bf2b07278003e1cd1ed6b3de5abdbeca..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'UCX_NET_DEVICES': 'mlx5_1:1', -} - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-plain.eb b/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-plain.eb deleted file mode 100644 index a37f3ef07cdf052d78734ca6795ff6110973e393..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/p/psmpi-settings/psmpi-settings-5.4-plain.eb +++ /dev/null @@ -1,22 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'plain' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_HARD_ABORT': '1', - 'UCX_NET_DEVICES': 'mlx5_1:1', -} - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/u/UCX/UCX-1.8.1.eb b/Overlays/jusuf_overlay/u/UCX/UCX-1.8.1.eb deleted file mode 100644 index df14257476b2a77486b622bf5d35c9ed4795803a..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/u/UCX/UCX-1.8.1.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.8.1' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter 
tuning in run-time, default: NO -# configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/u/UCX/UCX-1.9.0.eb b/Overlays/jusuf_overlay/u/UCX/UCX-1.9.0.eb deleted file mode 100644 index cdf6510cf9b9cd517a3d306120d6215db617c86c..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/u/UCX/UCX-1.9.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.9.0' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 
'share'] -} - -moduleclass = 'system' diff --git a/Overlays/jusuf_overlay/v/Vampir/Vampir-9.9.0.eb b/Overlays/jusuf_overlay/v/Vampir/Vampir-9.9.0.eb deleted file mode 100644 index b52587b858076bfcc4f4fbea45bbc7431a8233f7..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/v/Vampir/Vampir-9.9.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'Binary' - -name = "Vampir" -version = "9.9.0" -local_archsuffix = "-linux-x86_64" - -homepage = 'http://www.vampir.eu' -description = """The VAMPIR software tool provides an easy-to-use framework that enables -developers to quickly display and analyze arbitrary program behavior at any level of detail. -The tool suite implements optimized event analysis algorithms and customizable displays that -enable fast and interactive rendering of very complex performance monitoring data. - -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['vampir-%s%s-setup.sh' % (version, local_archsuffix)] - -install_cmd = './vampir-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s' - -sanity_check_paths = { - 'files': ["bin/vampir", "doc/vampir-manual.pdf"], - 'dirs': [] -} - -modextravars = { - 'VAMPIR_LICENSE': '/p/software/jusuf/licenses/vampir/vampir.license', -} - -moduleclass = 'tools' diff --git a/Overlays/jusuf_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb b/Overlays/jusuf_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb deleted file mode 100644 index edcba855dada07783b1bda33dd4a212b9b00719f..0000000000000000000000000000000000000000 --- a/Overlays/jusuf_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'Binary' - -name = "VampirServer" -version = "9.9.0" - -homepage = 'http://www.vampir.eu' -description = """The VAMPIR software tool provides an easy-to-use framework that enables -developers to quickly display and analyze arbitrary program behavior at any level of detail. -The tool suite implements optimized event analysis algorithms and customizable displays that -enable fast and interactive rendering of very complex performance monitoring data. 
-""" - -usage = """ -To start VampirServer -module load Vampir VampirServer -vampir & -BATCH_OPT="--account=<budget> --partition=<partition>" vampirserver start -n 4 mpi -(note server + port + server_id) -- Use it -Vampir GUI-> open other -> remote file -> server + port -- To stop VampirServer -vampirserver stop <server_id> -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -toolchainopts = {"usempi": True} - -sources = ['vampirserver-%s-linux-x86_64-setup.sh' % (version)] - -install_cmd = ('./vampirserver-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s ' - '&& %(installdir)s/bin/vampirserver config --silent') - -sanity_check_paths = { - 'files': ["bin/vampirserver", "doc/vampirserver-manual.pdf"], - 'dirs': [] -} - -# Remove Cray-specific 'ap' launcher, -# use SLURM launcher as MPI launcher and default -postinstallcmds = [ - 'rm %(installdir)s/etc/server/launcher/ap', - '''sed -i s/'BATCH_OPT=""'/'#BATCH_OPT=""'/g %(installdir)s/etc/server/launcher/custom/slurm''', - 'cp %(installdir)s/etc/server/launcher/custom/slurm %(installdir)s/etc/server/launcher/mpi', -] - -modextravars = { - 'VAMPIR_LICENSE': '/p/software/jusuf/licenses/vampir/vampir.license', -} - -moduleclass = 'perf' diff --git a/Overlays/juwels_overlay/e/Embree/Embree-3.8.0-GCC-9.3.0.eb b/Overlays/juwels_overlay/e/Embree/Embree-3.8.0-GCC-9.3.0.eb deleted file mode 100644 index 0daf9d3448bf03489634c55ca244e193be36fa98..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/e/Embree/Embree-3.8.0-GCC-9.3.0.eb +++ /dev/null @@ -1,58 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'Embree' -version = '3.8.0' - -homepage = 'http://www.ospray.org/' -description = """ -Embree is a collection of high-performance ray tracing kernels, developed at Intel. The target user of Embree are -graphics application engineers that want to improve the performance of their application by leveraging the optimized ray -tracing kernels of Embree. The kernels are optimized for photo-realistic rendering on the latest Intel processors with -support for SSE, AVX, AVX2, and AVX512. 
-""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} -toolchainopts = {'optarch': True, 'pic': True} - -source_urls = ['https://github.com/embree/embree/archive/'] -sources = ['v%(version)s.tar.gz'] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('ispc', '1.12.0', '', SYSTEM), -] - -dependencies = [ - ('X11', '20200222'), - ('OpenGL', '2020'), - ('freeglut', '3.2.1'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('ImageMagick', '7.0.10-25'), - ('OpenEXR', '2.5.2'), - ('tbb', '2020.3'), -] - -separate_build_dir = True - -configopts = '-DCMAKE_BUILD_TYPE=Release ' -configopts += '-DEMBREE_ISPC_SUPPORT=ON ' -configopts += '-DEMBREE_TASKING_SYSTEM=TBB ' -configopts += '-DEMBREE_TBB_ROOT=$EBROOTTBB ' -# Select highest supported ISA (SSE2, SSE4.2, AVX, AVX2, AVX512KNL, AVX512SKX, or NONE) -configopts += '-DEMBREE_MAX_ISA=AVX512SKX ' -configopts += '-DEMBREE_TUTORIALS=OFF ' - -sanity_check_paths = { - 'dirs': ['include/embree3'], - 'files': ['lib64/libembree3.so'] -} - -modextrapaths = { - 'CMAKE_MODULE_PATH': 'lib64/cmake/embree-%(version)s/' -} - -moduleclass = 'vis' diff --git a/Overlays/juwels_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb b/Overlays/juwels_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb deleted file mode 100644 index 030ece8ba92f6d9c2db0bd9b7ca5aa461175d250..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,59 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2020.2.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.18-28964561.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['604769a55a72adce8f1513fcacb36d7cf5b5e3cc99b65d6a20e4d5e987344cb0'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround 1) (find) due to wrong permissions once the files are extracted from the .run file -# Workaround 2) (mv) due to CentOS 8 coming with a newer libk5crypto.so which doesn't have the symbols -# anymore needed by Nsight Compute's own libcrypto.so. Removing / renaming the shipped -# libcrypto.so makes Nsight Compute pick up the system libcrypto.so and everything is -# grand again. Bug has been filed by Andreas, 13.10.2020 - -local_libcrypto_path = '%(installdir)s/host/linux-desktop-glibc_2_11_3-x64/libcrypto.so' -postinstallcmds = [ - 'find %(installdir)s -type f -and -executable -and ! 
-name "lib*" -exec chmod go+x {} \;', - 'mv %s %s.bak' % (local_libcrypto_path, local_libcrypto_path), - 'mv %s.1.1 %s.1.1.bak' % (local_libcrypto_path, local_libcrypto_path) -] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Overlays/juwels_overlay/n/nvidia-driver/nvidia-driver-default.eb b/Overlays/juwels_overlay/n/nvidia-driver/nvidia-driver-default.eb deleted file mode 100644 index 4606bd345167e79e6690a45a391f96a9957d48b7..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/n/nvidia-driver/nvidia-driver-default.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'nvidia-driver' -version = 'default' -realversion = '460.32.03' - -homepage = 'https://developer.nvidia.com/cuda-toolkit' -description = """This is a set of libraries normally installed by the NVIDIA driver installer.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['http://us.download.nvidia.com/tesla/%s/' % realversion] -sources = ['NVIDIA-Linux-x86_64-%s.run' % realversion] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'system' diff --git a/Overlays/juwels_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb b/Overlays/juwels_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb deleted file mode 100644 index ab139ab9b6585b6378b990ab154463b778a96764..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.9.0', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git 
a/Overlays/juwels_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/juwels_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 155e0c334d0fa886a3d2eaae3ba013e91cdf38d4..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'iccifort', 'version': '2020.2.254-GCC-9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.9.0', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/juwels_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb b/Overlays/juwels_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb deleted file mode 100644 index 374e4e52bb3e11daecccf8504d074bc23f673c41..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb +++ /dev/null @@ -1,98 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyTorch-Geometric' -version = '1.6.3' -local_pytorch_ver = '1.7.0' -versionsuffix = '-Python-%%(pyver)s-PyTorch-%s' % local_pytorch_ver - -homepage = 'https://github.com/rusty1s/pytorch_geometric' -description = "PyTorch Geometric (PyG) is a geometric deep learning extension library for PyTorch." 
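# Illustration: a minimal, runnable sketch of what the sanity_check_paths list
# comprehensions in the OpenMPI easyconfigs above expand to. SHLIB_EXT is an
# EasyBuild template constant; 'so' is assumed here for a Linux build.
SHLIB_EXT = 'so'  # assumption for this sketch; EasyBuild sets the real value per platform
local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"]
sanity_files = (
    ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] +
    ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] +
    ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]]
)
print(sanity_files)  # 12 paths, e.g. 'bin/ompi_info', 'lib/libmpi.so', 'include/mpi.h'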
- -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -local_pysuff = '-Python-%(pyver)s' -dependencies = [ - ('Python', '3.8.5'), - ('PyTorch', local_pytorch_ver, local_pysuff), - ('numba', '0.51.1', local_pysuff), - ('h5py', '2.10.0', '-serial%s' % local_pysuff), - ('scikit', '2020', local_pysuff), - ('torchvision', '0.8.2', local_pysuff), - ('trimesh', '3.8.11', local_pysuff), - ('METIS', '5.1.0', '-IDX64'), -] - -use_pip = True - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_download_dep_fail = True -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('gdist', '1.0.3', { - 'source_urls': ['https://pypi.python.org/packages/source/g/gdist'], - 'modulename': 'gdist', - }), - ('rdflib', '5.0.0', { - 'checksums': ['78149dd49d385efec3b3adfbd61c87afaf1281c30d3fcaf1b323b34f603fb155'], - 'modulename': 'rdflib', - }), - ('googledrivedownloader', '0.4', { - 'checksums': ['4b34c1337b2ff3bf2bd7581818efbdcaea7d50ffd484ccf80809688f5ca0e204'], - 'modulename': 'google_drive_downloader', - }), - ('plyfile', '0.7.2', { - 'checksums': ['59a25845d00a51098e6c9147c3c96ce89ad97395e256a4fabb4aed7cf7db5541'], - }), - ('torch_scatter', '2.0.5', { - 'patches': ['torch_scatter-2.0.5-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '2.0.5.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_scatter/archive/'], - }), - ('torch_sparse', '0.6.8', { - 'patches': ['torch_sparse-0.6.8-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'preinstallopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'source_tmpl': '0.6.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_sparse/archive/'], - }), - ('torch_cluster', '1.5.8', { - 'patches': ['torch_cluster-1.5.8-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.5.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_cluster/archive/'], - }), - ('torch_spline_conv', '1.2.0', { - 'patches': ['torch_spline_conv-1.2.0-sm_70.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.2.0.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_spline_conv/archive'], - }), - ('ase', '3.21.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/ase'], - 'modulename': 'ase', - }), - ('python-louvain', '0.15', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-louvain'], - 'checksums': ['2a856edfbe29952a60a5538a84bb78cca18f6884a88b9325e85a11c8dd4917eb'], - 'modulename': 'community', - }), - ('tqdm', '4.56.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tqdm'], - 'modulename': 'tqdm', - }), - ('torch_geometric', version, { - 'checksums': ['347f693bebcc8a621eda4867dafab91c04db5f596d7ed7ecb89b242f8ab5c6a1'], - }), -] - -sanity_pip_check = True - -moduleclass = 'devel' diff --git a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch b/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch deleted file mode 100644 index 25bbf0ec24a03461821a0b38e4ef043efcb052d0..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:10:11.609352000 +0100 -+++ setup.py 2021-01-20 10:10:37.525550350 +0100 -@@ -39,7 +39,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = 
os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch b/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch deleted file mode 100644 index 94ae43ed4ef1a4d0788e0d08a0564b8978868506..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 09:53:22.085271000 +0100 -+++ setup.py 2021-01-20 09:53:54.835241801 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch b/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch deleted file mode 100644 index 1a4f2d0fbc9e83c52a46f82dd5abfc6bdf00af82..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:07:15.518446000 +0100 -+++ setup.py 2021-01-20 10:07:51.389877000 +0100 -@@ -53,7 +53,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - if sys.platform == 'win32': diff --git a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch b/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch deleted file mode 100644 index e0a3e0ccf186aef0a4c22892b79ae0db810f351c..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_70.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:12:33.326687000 +0100 -+++ setup.py 2021-01-20 10:12:51.492198482 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/juwels_overlay/p/pscom/pscom-5.4-default.eb b/Overlays/juwels_overlay/p/pscom/pscom-5.4-default.eb deleted file mode 100644 index 309d976fd1e0697947bee82968b946272d646123..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/p/pscom/pscom-5.4-default.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'pscom' -# Create drop-in replacement version that ensures over-riding behaviour 
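# Illustration: a runnable sketch of the setup.py flag logic that the *-sm_70.patch
# files above modify. The patches swap the upstream '-arch=sm_35' default for a
# -gencode flag targeting compute capability 7.0 (Volta-class GPUs, e.g. V100);
# the NVCC_FLAGS environment handling is kept exactly as in the patched sources.
import os

nvcc_flags = os.getenv('NVCC_FLAGS', '')
nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ')
# upstream default: nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr']
nvcc_flags += ['-gencode=arch=compute_70,code=sm_70', '--expt-relaxed-constexpr']
extra_compile_args = {'nvcc': nvcc_flags}
print(extra_compile_args)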
-version = "5.4-default" -local_realversion = "5.4.7-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ParaStation/%(name)s/archive/'] -sources = ['%%(name)s-%s.tar.bz2' % local_realversion] - -builddependencies = [ - ('popt', '1.16'), - ('CUDA', '11.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('UCX', '1.9.0'), -] - -build_type = 'RelWithDebInfo' - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '-DCUDA_ENABLED=ON' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % local_realversion, -} - -moduleclass = 'tools' diff --git a/Overlays/juwels_overlay/u/UCX/UCX-1.8.1.eb b/Overlays/juwels_overlay/u/UCX/UCX-1.8.1.eb deleted file mode 100644 index 740e9d2e3d5f84bb1f22280bac5c90f32d8f4e33..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/u/UCX/UCX-1.8.1.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.8.1' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -# configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support -configopts += '--without-cm ' # Disable IB CM - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 
'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/juwels_overlay/u/UCX/UCX-1.9.0.eb b/Overlays/juwels_overlay/u/UCX/UCX-1.9.0.eb deleted file mode 100644 index be5e7d345e87b84378bcbf8af348c6888b20c666..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/u/UCX/UCX-1.9.0.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.9.0' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support -configopts += '--without-cm ' # Disable IB CM - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/juwels_overlay/v/Vampir/Vampir-9.9.0.eb b/Overlays/juwels_overlay/v/Vampir/Vampir-9.9.0.eb deleted file mode 100644 index 413d5167f8c0eca10b39fc08171dfa6eaaac8811..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/v/Vampir/Vampir-9.9.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'Binary' - -name = "Vampir" -version = "9.9.0" -local_archsuffix = "-linux-x86_64" - -homepage = 'http://www.vampir.eu' -description = """The VAMPIR software tool provides an easy-to-use framework that enables -developers to quickly display and analyze arbitrary 
program behavior at any level of detail. -The tool suite implements optimized event analysis algorithms and customizable displays that -enable fast and interactive rendering of very complex performance monitoring data. - -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -sources = ['vampir-%s%s-setup.sh' % (version, local_archsuffix)] - -install_cmd = './vampir-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s' - -sanity_check_paths = { - 'files': ["bin/vampir", "doc/vampir-manual.pdf"], - 'dirs': [] -} - -modextravars = { - 'VAMPIR_LICENSE': '/p/software/juwels/licenses/vampir/vampir.license', -} - -moduleclass = 'tools' diff --git a/Overlays/juwels_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb b/Overlays/juwels_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb deleted file mode 100644 index 44937799673e5ac07ab11ea32c4f90678a69e7ef..0000000000000000000000000000000000000000 --- a/Overlays/juwels_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb +++ /dev/null @@ -1,61 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# License:: New BSD -# -# This work is based from experiences from the UNITE project -# http://apps.fz-juelich.de/unite/ -## -easyblock = 'Binary' - -name = "VampirServer" -version = "9.9.0" - -homepage = 'http://www.vampir.eu' -description = """The VAMPIR software tool provides an easy-to-use framework that enables -developers to quickly display and analyze arbitrary program behavior at any level of detail. -The tool suite implements optimized event analysis algorithms and customizable displays that -enable fast and interactive rendering of very complex performance monitoring data. 
-""" - -usage = """ -To start VampirServer -module load Vampir VampirServer -vampir & -BATCH_OPT="--account=<budget> --partition=<partition>" vampirserver start -n 4 mpi -(note server + port + server_id) -- Use it -Vampir GUI-> open other -> remote file -> server + port -- To stop VampirServer -vampirserver stop <server_id> -""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -toolchainopts = {"usempi": True} - -sources = ['vampirserver-%s-linux-x86_64-setup.sh' % (version)] - -install_cmd = ('./vampirserver-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s ' - '&& %(installdir)s/bin/vampirserver config --silent') - -sanity_check_paths = { - 'files': ["bin/vampirserver", "doc/vampirserver-manual.pdf"], - 'dirs': [] -} - -# Remove Cray-specific 'ap' launcher, -# use SLURM launcher as MPI launcher and default -postinstallcmds = [ - 'rm %(installdir)s/etc/server/launcher/ap', - '''sed -i s/'BATCH_OPT=""'/'#BATCH_OPT=""'/g %(installdir)s/etc/server/launcher/custom/slurm''', - 'cp %(installdir)s/etc/server/launcher/custom/slurm %(installdir)s/etc/server/launcher/mpi', -] - -modextravars = { - 'VAMPIR_LICENSE': '/p/software/juwels/licenses/vampir/vampir.license', -} - -moduleclass = 'perf' diff --git a/Overlays/juwelsbooster_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb b/Overlays/juwelsbooster_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb deleted file mode 100644 index 6e70c8f5427ba06c25874a8406235be946eb50b6..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-10.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'Doxygen' -version = '1.8.18' - -homepage = 'http://www.doxygen.org' -description = """Doxygen is a documentation system for C++, C, Java, Objective-C, Python, - IDL (Corba and Microsoft flavors), Fortran, VHDL, PHP, C#, and to some extent D. -""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '10.3.0'} - -sources = ['%(namelower)s-%(version)s.src.tar.gz'] -source_urls = ['http://doxygen.nl/files/'] - -builddependencies = [ - ('binutils', '2.36.1'), - ('CMake', '3.18.0', '', SYSTEM), - # flex 2.6.4 provokes a segmentation fault in M4, when compiling Doxygen in CentOS8, so we fall back to 2.6.3 - ('flex', '2.6.3'), - ('Bison', '3.7.6'), -] - -moduleclass = 'devel' diff --git a/Overlays/juwelsbooster_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb b/Overlays/juwelsbooster_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb deleted file mode 100644 index c088cee99904ef9cc765b9d50f30006106492d51..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/d/Doxygen/Doxygen-1.8.18-GCCcore-9.3.0.eb +++ /dev/null @@ -1,24 +0,0 @@ -name = 'Doxygen' -version = '1.8.18' - -homepage = 'http://www.doxygen.org' -description = """Doxygen is a documentation system for C++, C, Java, Objective-C, Python, - IDL (Corba and Microsoft flavors), Fortran, VHDL, PHP, C#, and to some extent D. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -sources = ['%(namelower)s-%(version)s.src.tar.gz'] -source_urls = ['http://doxygen.nl/files/'] - -builddependencies = [ - ('binutils', '2.34'), - ('CMake', '3.18.0'), - # flex 2.6.4 provokes a segmentation fault in M4, when compiling Doxygen in CentOS8, so we fall back to 2.6.3 - ('flex', '2.6.3'), - ('Bison', '3.6.4'), -] - -moduleclass = 'devel' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb deleted file mode 100644 index 80f1ba62377bc954f4dea278ef460e164e57e66c..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.05.001-gpsmkl-2020-gpu.eb +++ /dev/null @@ -1,97 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' -versionsuffix = '-gpu' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_gf_lp64 --lmkl_sequential -lmkl_core -lmkl_blacs_intelmpi_lp64 --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb deleted file mode 100644 index a4144ed481fc8771c47e3dc832cd3f285b0f4636..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.05.001-intel-para-2020-gpu.eb +++ /dev/null @@ -1,96 +0,0 @@ -name = 'ELPA' -version = '2020.05.001' -versionsuffix = '-gpu' - -homepage = 'http://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. 
-""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_intel_lp64 --lmkl_sequential -lmkl_core -lmkl_blacs_intelmpi_lp64 --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2020'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = ['http://elpa.mpcdf.mpg.de/html/Releases/%(version)s/'] -sources = [SOURCELOWER_TAR_GZ] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gomkl-2021-gpu.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gomkl-2021-gpu.eb deleted file mode 100644 index 1571aa999de252cfaac1cc586dc482472a3de04a..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gomkl-2021-gpu.eb +++ /dev/null @@ -1,107 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. 
- -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_gf_lp64 -lmkl_sequential -lmkl_core --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gomkl-2021.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gomkl-2021.eb deleted file mode 100644 index 95b13a868884a3dd8ef057983b988feb4de1aa72..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gomkl-2021.eb +++ /dev/null @@ -1,94 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). 
This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_gf_lp64 -lmkl_sequential[-lmkl_gnu_thread] --lmkl_core -lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gomkl', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gpsmkl-2021-gpu.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gpsmkl-2021-gpu.eb deleted file mode 100644 index 
32137f1f2d00422ac81378e416c6362928acc9a8..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gpsmkl-2021-gpu.eb +++ /dev/null @@ -1,106 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 --lmkl_gf_lp64 -lmkl_sequential -lmkl_core --lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': 
'%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gpsmkl-2021.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gpsmkl-2021.eb deleted file mode 100644 index caae290801be91602f08529f4e9ff5622fb2cb36..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-gpsmkl-2021.eb +++ /dev/null @@ -1,93 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_gf_lp64 --lmkl_sequential[-lmkl_gnu_thread] --lmkl_core -lgomp -lpthread -lm -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'gpsmkl', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'export LDFLAGS="-lm $LDFLAGS" && ' -preconfigopts += 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-intel-para-2021-gpu.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-intel-para-2021-gpu.eb deleted file mode 100644 index 637704d18ac9177a428df950ba9d942fa93837c0..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-intel-para-2021-gpu.eb +++ /dev/null @@ -1,105 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. 
-""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 --lmkl_intel_lp64 -lmkl_sequential -lmkl_core --liomp5 -lpthread -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-intel-para-2021.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-intel-para-2021.eb deleted file mode 100644 index 6ae1a590c091809e29a6c690661047a09dd2db72..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-intel-para-2021.eb +++ /dev/null @@ -1,93 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. 
They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -lmkl_intel_lp64 --lmkl_sequential[-lmkl_intel_thread] --lmkl_core -liomp5 -lpthread -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'intel-para', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-iomkl-2021-gpu.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-iomkl-2021-gpu.eb deleted file mode 100644 index 9fab0aa882045f9471c463ab417f019bff38e88d..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-iomkl-2021-gpu.eb +++ /dev/null @@ -1,107 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' -versionsuffix = '-gpu' - -homepage = 'https://elpa.rzg.mpg.de' -description = 
"""Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version with support for GPUs. To get -full GPU performance it is necessary to enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script. With that option usage of OpenMP is not possible. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE/ -I$ELPA_INCLUDE/elpa -I$ELPA_MODULES - -and linked with - --L$EBROOTELPA/lib -lelpa --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_intel_lp64 -lmkl_sequential -lmkl_core --liomp5 -lpthread -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'openmp': False, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -dependencies = [ - ('CUDA', '11.3', '', True), -] - -configopts = '--enable-gpu ' -configopts += '--with-GPU-compute-capability="sm_80" ' - -prebuildopts = 'export LIBS="$LIBS -lcudart -lcublas" && ' - - -with_single = False - - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/Makefile_pure', - 'rm %(installdir)s/examples/Makefile_hybrid', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/C/Makefile_examples_pure', - 'rm %(installdir)s/examples/C/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_pure', - 'rm %(installdir)s/examples/Fortran/Makefile_examples_hybrid', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'cp config.h config-f90.h %(installdir)s/include/elpa-%(version)s/elpa/', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use GPUs you should enable Nvidia Multi-process Service -by adding -#SBATCH --gres=gpu:4 --partition=gpus --cuda-mps -in your batch script -""" - -moduleclass = 'math' diff --git 
a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-iomkl-2021.eb b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-iomkl-2021.eb deleted file mode 100644 index d66eade6ea3bd892c11faa3a28d49b9b526e7dc3..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001-iomkl-2021.eb +++ /dev/null @@ -1,94 +0,0 @@ -name = 'ELPA' -version = '2020.11.001' - -homepage = 'https://elpa.rzg.mpg.de' -description = """Eigenvalue SoLvers for Petaflop-Applications. ELPA has been installed as module in -$EBROOTELPA ($ELPA_ROOT is also defined). This installation -contains the pure MPI version and the hybrid MPI/OpenMP version. -Notice: If you want to use OpenMP threads you have to set -export ELPA_DEFAULT_omp=<number of threads per MPI process> -in your batch job. - -Several assembly kernels have been compiled. They can be chosen at runtime when calling the library or -with the environment variables REAL_ELPA_KERNEL or COMPLEX_ELPA_KERNEL. - -An example is -export REAL_ELPA_KERNEL=REAL_ELPA_KERNEL_GENERIC -which chooses the generic real kernel for elpa2. -Starting with version 2019.11.001 the legacy interface is no longer available. -""" - -usage = """You can get an overview over the available kernels by loading ELPA and then submitting a batch job with - -srun --ntasks=1 $EBROOTELPA/bin/elpa2_print_kernels - -Programs using this ELPA library have to be compiled with - --I$ELPA_INCLUDE[_OPENMP]/ -I$ELPA_INCLUDE[_OPENMP]/elpa -I$ELPA_MODULES[_OPENMP] - -and linked with - --L$EBROOTELPA/lib -lelpa[_openmp] --lmkl_scalapack_lp64 -${MKLROOT}/lib/intel64/libmkl_blacs_openmpi_lp64.a --lmkl_intel_lp64 -lmkl_sequential[-lmkl_intel_thread] --lmkl_core -liomp -lpthread -ldl -lstdc++ -""" - -examples = 'Examples can be found in $EBROOTELPA/examples' - -site_contacts = 'I. 
Gutheil (i.gutheil@fz-juelich.de)' - -toolchain = {'name': 'iomkl', 'version': '2021'} -toolchainopts = {'openmp': True, 'usempi': True} - -source_urls = [ - 'https://elpa.rzg.mpg.de/software/tarball-archive/Releases/%(version)s/'] -sources = ["elpa-%(version)s.tar.gz"] -checksums = ['15591f142eeaa98ab3201d27ca9ac328e21beabf0803b011a04183fcaf6efdde'] - -patches = [ - 'ELPA-%(version)s_install-libelpatest.patch', -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -preconfigopts = 'autoreconf && ' - -with_single = False - -postinstallcmds = [ - 'cp -r %(builddir)s/elpa-%(version)s/examples %(installdir)s/examples/', - 'rm %(installdir)s/examples/*.orig', - 'rm %(installdir)s/examples/*_cuda', - 'rm %(installdir)s/examples/C/*.orig', - 'rm %(installdir)s/examples/C/*_cuda', - 'rm %(installdir)s/examples/Fortran/*.orig', - 'rm %(installdir)s/examples/Fortran/*_cuda', - 'cp %(builddir)s/elpa-%(version)s/test/shared/generated.h %(installdir)s/examples/C/generated.h', - 'cp config.h config-f90.h %(installdir)s/include/elpa_openmp-%(version)s/elpa/', - 'grep -v WITH_OPENMP config.h > %(installdir)s/include/elpa-%(version)s/elpa/config.h', - 'grep -v WITH_OPENMP config-f90.h > %(installdir)s/include/elpa-%(version)s/elpa/config-f90.h', - 'cp %(builddir)s/elpa-%(version)s/private_modules/* %(installdir)s/include/elpa-%(version)s/modules', - 'cp %(builddir)s/elpa-%(version)s/test_modules/* %(installdir)s/include/elpa-%(version)s/modules', -] - -modextravars = { - 'ELPA_ROOT': '%(installdir)s', - 'ELPAROOT': '%(installdir)s', - 'ELPA_INCLUDE': '%(installdir)s/include/elpa-%(version)s/', - 'ELPA_INCLUDE_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/', - 'ELPA_LIB': '%(installdir)s/lib', - 'ELPA_LIB_OPENMP': '%(installdir)s/lib', - 'ELPA_MODULES': '%(installdir)s/include/elpa-%(version)s/modules', - 'ELPA_MODULES_OPENMP': '%(installdir)s/include/elpa_openmp-%(version)s/modules', -} - -modloadmsg = """ -Notice: If you want to use OpenMP threads you have to set -$ export ELPA_DEFAULT_omp=<number of threads per MPI process> -""" - -moduleclass = 'math' diff --git a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001_install-libelpatest.patch b/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001_install-libelpatest.patch deleted file mode 100644 index f4b825b158b418f264adf625455bff32ed7cc245..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/e/ELPA/ELPA-2020.11.001_install-libelpatest.patch +++ /dev/null @@ -1,10535 +0,0 @@ ---- elpa-2020.11.001/Makefile.am 2020-12-22 08:42:55.000000000 +0100 -+++ elpa-2020.11.001_ok/Makefile.am 2021-02-02 12:59:31.635462357 +0100 -@@ -628,7 +628,7 @@ - test_program_fcflags = $(AM_FCFLAGS) $(FC_MODOUT)test_modules $(FC_MODINC)test_modules $(FC_MODINC)modules $(FC_MODINC)private_modules - - # library with shared sources for the test files --noinst_LTLIBRARIES += libelpatest@SUFFIX@.la -+lib_LTLIBRARIES += libelpatest@SUFFIX@.la - libelpatest@SUFFIX@_la_FCFLAGS = $(test_program_fcflags) - libelpatest@SUFFIX@_la_SOURCES = \ - test/shared/tests_variable_definitions.F90 \ -diff -ruN elpa-2020.11.001/examples/C/Makefile_examples_hybrid elpa-2020.11.001_ok/examples/C/Makefile_examples_hybrid ---- elpa-2020.11.001/examples/C/Makefile_examples_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/Makefile_examples_hybrid 2021-07-02 10:32:19.117127000 +0200 -@@ -0,0 +1,31 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# 
SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+# GCC -+# F90 = mpif90 -O3 -fopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB_OPENMP) -lelpa_openmp -lelpatest_openmp $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -qopenmp -+# GCC -+# CC = mpicc -O3 -fopenmp -+ -+all: test_real_1stage_hybrid test_real_2stage_all_kernels_hybrid test_autotune_hybrid test_multiple_objs_hybrid -+ -+test_real_1stage_hybrid: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DWITH_OPENMP -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels_hybrid: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DWITH_OPENMP -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_autotune_hybrid: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs_hybrid: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.11.001/examples/C/Makefile_examples_pure elpa-2020.11.001_ok/examples/C/Makefile_examples_pure ---- elpa-2020.11.001/examples/C/Makefile_examples_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/Makefile_examples_pure 2021-07-02 10:37:44.708232000 +0200 -@@ -0,0 +1,27 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs -+ -+test_real_1stage: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ 
-+test_autotune: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.11.001/examples/C/Makefile_examples_pure_cuda elpa-2020.11.001_ok/examples/C/Makefile_examples_pure_cuda ---- elpa-2020.11.001/examples/C/Makefile_examples_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/Makefile_examples_pure_cuda 2021-07-02 10:40:47.722615000 +0200 -@@ -0,0 +1,27 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcublas -lcudart -+CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs -+ -+test_real_1stage: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_real_2stage_all_kernels: test.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test.c $(LIBS) -+ -+test_autotune: test_autotune.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.c $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.c -+ $(CC) -DCURRENT_API_VERSION=20190524 -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.c $(LIBS) -+ -diff -ruN elpa-2020.11.001/examples/C/test_autotune.c elpa-2020.11.001_ok/examples/C/test_autotune.c ---- elpa-2020.11.001/examples/C/test_autotune.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/test_autotune.c 2021-07-02 10:41:06.048699058 +0200 -@@ -0,0 +1,335 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. 
Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. -+*/ -+ -+#include "config.h" -+ -+#include <string.h> -+#include <stdio.h> -+#include <stdlib.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+//#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+//#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+//#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# else -+# define MATRIX_TYPE complex float -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# else -+# define MATRIX_TYPE complex double -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+ -+#ifdef HAVE_64BIT_INTEGER_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#include "generated.h" -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE status; -+ int error_elpa; -+ elpa_t handle; -+ -+ elpa_autotune_t autotune_handle; -+ C_INT_TYPE i, unfinished; -+ -+ C_INT_TYPE value; -+#ifdef WITH_MPI -+ MPI_Init(&argc, &argv); -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocs); -+ MPI_Comm_rank(MPI_COMM_WORLD, &myid); -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* 
convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_real_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_real_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_complex_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_complex_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ handle = elpa_allocate(); -+#else -+ handle = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ /* Set parameters */ -+ elpa_set(handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (myid == 0) { -+ printf("Setting the matrix parameters na=%d, nev=%d \n",na,nev); -+ } -+ elpa_set(handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(handle)); -+ -+ elpa_set(handle, "gpu", 0, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ autotune_handle = elpa_autotune_setup(handle, ELPA_AUTOTUNE_FAST, ELPA_AUTOTUNE_DOMAIN_REAL, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ /* mimic 20 scf steps */ -+ -+ for (i=0; i < 20; i++) { -+ -+ unfinished = elpa_autotune_step(handle, autotune_handle, &error_elpa); -+ -+ if (unfinished == 0) { -+ if (myid == 0) { -+ printf("ELPA autotuning finished in the %d th scf step \n",i); -+ } -+ break; -+ } -+ if (myid == 0) { -+ printf("The current setting of the ELPA object: \n"); -+ elpa_print_settings(handle, &error_elpa); -+ -+ printf("The state of the autotuning: \n"); -+ elpa_autotune_print_state(handle, autotune_handle, &error_elpa); -+ } -+ -+ -+ /* Solve EV problem */ -+ elpa_eigenvectors(handle, a, ev, z, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* check the results */ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_real_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(double)); -+ -+#else -+ status = 
check_correctness_evp_numeric_residuals_real_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(float)); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_complex_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex double)); -+#else -+ status = check_correctness_evp_numeric_residuals_complex_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex float)); -+#endif -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ break; -+ } -+ printf("hier %d \n",myid); -+ } -+ -+ if (unfinished == 1) { -+ if (myid == 0) { -+ printf("ELPA autotuning did not finished during %d scf cycles\n",i); -+ -+ } -+ -+ } -+ elpa_autotune_set_best(handle, autotune_handle, &error_elpa); -+ -+ if (myid == 0) { -+ printf("The best combination found by the autotuning:\n"); -+ elpa_autotune_print_best(handle, autotune_handle, &error_elpa); -+ } -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_autotune_deallocate(autotune_handle); -+ elpa_deallocate(handle); -+#else -+ elpa_autotune_deallocate(autotune_handle, &error_elpa); -+ elpa_deallocate(handle, &error_elpa); -+#endif -+ elpa_uninit(&error_elpa); -+ -+ if (myid == 0) { -+ printf("\n"); -+ printf("2stage ELPA real solver complete\n"); -+ printf("\n"); -+ } -+ -+ if (status ==0){ -+ if (myid ==0) { -+ printf("All ok!\n"); -+ } -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} -diff -ruN elpa-2020.11.001/examples/C/test.c elpa-2020.11.001_ok/examples/C/test.c ---- elpa-2020.11.001/examples/C/test.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/test.c 2021-07-02 10:41:14.785601626 +0200 -@@ -0,0 +1,339 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. 
If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. -+*/ -+ -+#include "config.h" -+ -+#include <stdio.h> -+#include <stdlib.h> -+#include <string.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_GENERALIZED_DECOMP_EIGENPROBLEM -+#define TEST_GENERALIZED_EIGENPROBLEM -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_real_single_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_real_single_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_real_single_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_real_single_f -+# else -+# define MATRIX_TYPE complex float -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_complex_single_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_complex_single_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_complex_single_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_complex_single_f -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_real_double_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_real_double_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_real_double_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_real_double_f -+# else -+# define MATRIX_TYPE complex double -+# define PREPARE_MATRIX_RANDOM prepare_matrix_random_complex_double_f -+# define PREPARE_MATRIX_RANDOM_SPD prepare_matrix_random_spd_complex_double_f -+# define CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS check_correctness_evp_numeric_residuals_complex_double_f -+# define CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS check_correctness_evp_gen_numeric_residuals_complex_double_f -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#define C_INT_MPI_TYPE long int -+#else -+#define 
TEST_C_INT_MPI_TYPE_PTR int* -+#define C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#define C_INT_MPI_TYPE int -+#endif -+#include "generated.h" -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_MPI_TYPE myidMPI, nprocsMPI; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ C_INT_MPI_TYPE provided_mpi_thread_level; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z, *b, *bs; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE error, status; -+ int error_elpa; -+ -+ elpa_t handle; -+ -+ int value; -+#ifdef WITH_MPI -+#ifndef WITH_OPENMP_TRADITIONAL -+ MPI_Init(&argc, &argv); -+#else -+ MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided_mpi_thread_level); -+ -+ if (provided_mpi_thread_level != MPI_THREAD_MULTIPLE) { -+ fprintf(stderr, "MPI ERROR: MPI_THREAD_MULTIPLE is not provided on this system\n"); -+ MPI_Finalize(); -+ exit(77); -+ } -+#endif -+ -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocsMPI); -+ nprocs = (C_INT_TYPE) nprocsMPI; -+ MPI_Comm_rank(MPI_COMM_WORLD, &myidMPI); -+ myid = (C_INT_TYPE) myidMPI; -+ -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+ PREPARE_MATRIX_RANDOM(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ b = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ bs = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ PREPARE_MATRIX_RANDOM_SPD(na, myid, na_rows, na_cols, sc_desc, b, z, bs, nblk, np_rows, np_cols, my_prow, my_pcol); -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+ handle = elpa_allocate(&error_elpa); -+ //assert_elpa_ok(error_elpa); -+ -+ /* Set parameters */ -+ elpa_set(handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (myid == 0) { -+ printf("Setting the matrix parameters na=%d, nev=%d \n",na,nev); -+ } -+ elpa_set(handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ 
elpa_set(handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ elpa_set(handle, "blacs_context", (int) my_blacs_ctxt, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(handle)); -+ -+ /* Set tunables */ -+#ifdef TEST_SOLVER_1STAGE -+ elpa_set(handle, "solver", ELPA_SOLVER_1STAGE, &error_elpa); -+#else -+ elpa_set(handle, "solver", ELPA_SOLVER_2STAGE, &error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(handle, "gpu", TEST_GPU, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#if defined(TEST_SOLVE_2STAGE) && defined(TEST_KERNEL) -+# ifdef TEST_COMPLEX -+ elpa_set(handle, "complex_kernel", TEST_KERNEL, &error_elpa); -+# else -+ elpa_set(handle, "real_kernel", TEST_KERNEL, &error_elpa); -+# endif -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ elpa_get(handle, "solver", &value, &error_elpa); -+ if (myid == 0) { -+ printf("Solver is set to %d \n", value); -+ } -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ elpa_generalized_eigenvectors(handle, a, b, ev, z, 0, &error_elpa); -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ //a = as, so that the problem can be solved again -+ memcpy(a, as, na_rows * na_cols * sizeof(MATRIX_TYPE)); -+ elpa_generalized_eigenvectors(handle, a, b, ev, z, 1, &error_elpa); -+#endif -+#else -+ /* Solve EV problem */ -+ elpa_eigenvectors(handle, a, ev, z, &error_elpa); -+#endif -+ assert_elpa_ok(error_elpa); -+ -+ elpa_deallocate(handle, &error_elpa); -+ elpa_uninit(&error_elpa); -+ -+ /* check the results */ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ status = CHECK_CORRECTNESS_EVP_GEN_NUMERIC_RESIDUALS(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs); -+#else -+ status = CHECK_CORRECTNESS_EVP_NUMERIC_RESIDUALS(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ } -+ if (status ==0){ -+ printf("All ok!\n"); -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ free(b); -+ free(bs); -+#endif -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} -diff -ruN elpa-2020.11.001/examples/C/test_multiple_objs.c elpa-2020.11.001_ok/examples/C/test_multiple_objs.c ---- elpa-2020.11.001/examples/C/test_multiple_objs.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/C/test_multiple_objs.c 2021-07-02 10:41:21.945970887 +0200 -@@ -0,0 +1,387 @@ -+/* This file is part of ELPA. -+ -+ The ELPA library was originally created by the ELPA consortium, -+ consisting of the following organizations: -+ -+ - Max Planck Computing and Data Facility (MPCDF), formerly known as -+ Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+ - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+ Informatik, -+ - Technische Universität München, Lehrstuhl für Informatik mit -+ Schwerpunkt Wissenschaftliches Rechnen , -+ - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+ - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+ Leipzig, Abt. 
Komplexe Strukutren in Biologie und Kognition, -+ and -+ - IBM Deutschland GmbH -+ -+ -+ More information can be found here: -+ http://elpa.mpcdf.mpg.de/ -+ -+ ELPA is free software: you can redistribute it and/or modify -+ it under the terms of the version 3 of the license of the -+ GNU Lesser General Public License as published by the Free -+ Software Foundation. -+ -+ ELPA is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public License -+ along with ELPA. If not, see <http://www.gnu.org/licenses/> -+ -+ ELPA reflects a substantial effort on the part of the original -+ ELPA consortium, and we ask you to respect the spirit of the -+ license that we chose: i.e., please contribute any changes you -+ may have back to the original ELPA library distribution, and keep -+ any derivatives of ELPA under the same license that we chose for -+ the original distribution, the GNU Lesser General Public License. -+*/ -+ -+#include "config.h" -+ -+#include <string.h> -+#include <stdio.h> -+#include <stdlib.h> -+#ifdef WITH_MPI -+#include <mpi.h> -+#endif -+#include <math.h> -+ -+#include <elpa/elpa.h> -+#include <assert.h> -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+//#error "define exactly one of TEST_REAL or TEST_COMPLEX" -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+//#error "define exactly one of TEST_SINGLE or TEST_DOUBLE" -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE)) -+//#error "define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE" -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE float -+# ifdef TEST_REAL -+# define MATRIX_TYPE float -+# else -+# define MATRIX_TYPE complex float -+# endif -+#else -+# define EV_TYPE double -+# ifdef TEST_REAL -+# define MATRIX_TYPE double -+# else -+# define MATRIX_TYPE complex double -+# endif -+#endif -+ -+#define assert_elpa_ok(x) assert(x == ELPA_OK) -+#ifdef HAVE_64BIT_INTEGER_SUPPORT -+#define TEST_C_INT_TYPE_PTR long int* -+#define C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#define C_INT_TYPE long int -+#else -+#define TEST_C_INT_TYPE_PTR int* -+#define C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#define C_INT_TYPE int -+#endif -+ -+#include "generated.h" -+void set_basic_parameters(elpa_t *handle, C_INT_TYPE na, C_INT_TYPE nev, C_INT_TYPE na_rows, C_INT_TYPE na_cols, C_INT_TYPE nblk, C_INT_TYPE my_prow, C_INT_TYPE my_pcol){ -+ int error_elpa; -+ elpa_set(*handle, "na", (int) na, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "nev", (int) nev, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "local_nrows", (int) na_rows, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "local_ncols", (int) na_cols, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "nblk", (int) nblk, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ elpa_set(*handle, "mpi_comm_parent", (int) (MPI_Comm_c2f(MPI_COMM_WORLD)), &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "process_row", (int) my_prow, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(*handle, "process_col", (int) my_pcol, &error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+} -+ -+ -+int main(int argc, char** argv) { -+ /* matrix dimensions */ -+ 
C_INT_TYPE na, nev, nblk; -+ -+ /* mpi */ -+ C_INT_TYPE myid, nprocs; -+ C_INT_TYPE na_cols, na_rows; -+ C_INT_TYPE np_cols, np_rows; -+ C_INT_TYPE my_prow, my_pcol; -+ C_INT_TYPE mpi_comm; -+ -+ /* blacs */ -+ C_INT_TYPE my_blacs_ctxt, sc_desc[9], info; -+ -+ /* The Matrix */ -+ MATRIX_TYPE *a, *as, *z; -+ EV_TYPE *ev; -+ -+ C_INT_TYPE status; -+ int error_elpa; -+ int gpu, timings, debug; -+ char str[400]; -+ -+ elpa_t elpa_handle_1, elpa_handle_2, *elpa_handle_ptr; -+ -+ elpa_autotune_t autotune_handle; -+ C_INT_TYPE i, unfinished; -+ -+ C_INT_TYPE value; -+#ifdef WITH_MPI -+ MPI_Init(&argc, &argv); -+ MPI_Comm_size(MPI_COMM_WORLD, &nprocs); -+ MPI_Comm_rank(MPI_COMM_WORLD, &myid); -+#else -+ nprocs = 1; -+ myid = 0; -+#endif -+ -+ if (argc == 4) { -+ na = atoi(argv[1]); -+ nev = atoi(argv[2]); -+ nblk = atoi(argv[3]); -+ } else { -+ na = 500; -+ nev = 250; -+ nblk = 16; -+ } -+ -+ for (np_cols = (C_INT_TYPE) sqrt((double) nprocs); np_cols > 1; np_cols--) { -+ if (nprocs % np_cols == 0) { -+ break; -+ } -+ } -+ -+ np_rows = nprocs/np_cols; -+ -+ /* set up blacs */ -+ /* convert communicators before */ -+#ifdef WITH_MPI -+ mpi_comm = MPI_Comm_c2f(MPI_COMM_WORLD); -+#else -+ mpi_comm = 0; -+#endif -+ set_up_blacsgrid_f(mpi_comm, np_rows, np_cols, 'C', &my_blacs_ctxt, &my_prow, &my_pcol); -+ set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, np_rows, np_cols, &na_rows, &na_cols, sc_desc, my_blacs_ctxt, &info); -+ -+ /* allocate the matrices needed for elpa */ -+ a = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ z = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ as = calloc(na_rows*na_cols, sizeof(MATRIX_TYPE)); -+ ev = calloc(na, sizeof(EV_TYPE)); -+ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_real_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_real_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ prepare_matrix_random_complex_double_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#else -+ prepare_matrix_random_complex_single_f(na, myid, na_rows, na_cols, sc_desc, a, z, as); -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) != ELPA_OK) { -+ fprintf(stderr, "Error: ELPA API version not supported"); -+ exit(1); -+ } -+ -+ elpa_handle_1 = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ set_basic_parameters(&elpa_handle_1, na, nev, na_rows, na_cols, nblk, my_prow, my_pcol); -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(elpa_handle_1)); -+ -+ elpa_set(elpa_handle_1, "gpu", 0, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(elpa_handle_1, "timings", 1, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_set(elpa_handle_1, "debug", 1, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_store_settings(elpa_handle_1, "initial_parameters.txt", &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ // barrier after store settings, file created from one MPI rank only, but loaded everywhere -+ MPI_Barrier(MPI_COMM_WORLD); -+#endif -+ -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_handle_2 = elpa_allocate(); -+#else -+ elpa_handle_2 = elpa_allocate(&error_elpa); -+ assert_elpa_ok(error_elpa); -+#endif -+ -+ set_basic_parameters(&elpa_handle_2, na, nev, na_rows, na_cols, nblk, my_prow, my_pcol); -+ /* Setup */ -+ assert_elpa_ok(elpa_setup(elpa_handle_2)); -+ -+ elpa_load_settings(elpa_handle_2, "initial_parameters.txt", &error_elpa); -+ -+ elpa_get(elpa_handle_2, "gpu", &gpu, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ 
elpa_get(elpa_handle_2, "timings", &timings, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ elpa_get(elpa_handle_2, "debug", &debug, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if ((timings != 1) || (debug != 1) || (gpu != 0)){ -+ printf("Parameters not stored or loaded correctly. Aborting... %d, %d, %d\n", timings, debug, gpu); -+ exit(1); -+ } -+ -+ elpa_handle_ptr = &elpa_handle_2; -+ -+ autotune_handle = elpa_autotune_setup(*elpa_handle_ptr, ELPA_AUTOTUNE_FAST, ELPA_AUTOTUNE_DOMAIN_REAL, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ /* mimic 20 scf steps */ -+ -+ for (i=0; i < 20; i++) { -+ -+ unfinished = elpa_autotune_step(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ if (unfinished == 0) { -+ if (myid == 0) { -+ printf("ELPA autotuning finished in the %d th scf step \n",i); -+ } -+ break; -+ } -+ -+ elpa_print_settings(*elpa_handle_ptr, &error_elpa); -+ elpa_autotune_print_state(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ sprintf(str, "saved_parameters_%d.txt", i); -+ elpa_store_settings(*elpa_handle_ptr, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* Solve EV problem */ -+ elpa_eigenvectors(*elpa_handle_ptr, a, ev, z, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ /* check the results */ -+#ifdef TEST_REAL -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_real_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(double)); -+ -+#else -+ status = check_correctness_evp_numeric_residuals_real_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(float)); -+#endif -+#else -+#ifdef TEST_DOUBLE -+ status = check_correctness_evp_numeric_residuals_complex_double_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex double)); -+#else -+ status = check_correctness_evp_numeric_residuals_complex_single_f(na, nev, na_rows, na_cols, as, z, ev, -+ sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol); -+ memcpy(a, as, na_rows*na_cols*sizeof(complex float)); -+#endif -+#endif -+ -+ if (status !=0){ -+ printf("The computed EVs are not correct !\n"); -+ break; -+ } -+ -+ elpa_autotune_print_state(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ sprintf(str, "saved_state_%d.txt", i); -+ elpa_autotune_save_state(*elpa_handle_ptr, autotune_handle, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+#ifdef WITH_MPI -+ //barrier after save state, file created from one MPI rank only, but loaded everywhere -+ MPI_Barrier(MPI_COMM_WORLD); -+#endif -+ -+ elpa_autotune_load_state(*elpa_handle_ptr, autotune_handle, str, &error_elpa); -+ assert_elpa_ok(error_elpa); -+ -+ if (unfinished == 1) { -+ if (myid == 0) { -+ printf("ELPA autotuning did not finished during %d scf cycles\n",i); -+ } -+ } -+ -+ } -+ elpa_autotune_set_best(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ -+ if (myid == 0) { -+ printf("The best combination found by the autotuning:\n"); -+ elpa_autotune_print_best(*elpa_handle_ptr, autotune_handle, &error_elpa); -+ } -+ -+ elpa_autotune_deallocate(autotune_handle, &error_elpa); -+ elpa_deallocate(elpa_handle_1, &error_elpa); -+#ifdef OPTIONAL_C_ERROR_ARGUMENT -+ elpa_deallocate(elpa_handle_2); -+#else -+ elpa_deallocate(elpa_handle_2, &error_elpa); -+#endif -+ elpa_uninit(&error_elpa); -+ -+ if 
(myid == 0) { -+ printf("\n"); -+ printf("2stage ELPA real solver complete\n"); -+ printf("\n"); -+ } -+ -+ if (status ==0){ -+ if (myid ==0) { -+ printf("All ok!\n"); -+ } -+ } -+ -+ free(a); -+ free(z); -+ free(as); -+ free(ev); -+ -+#ifdef WITH_MPI -+ MPI_Finalize(); -+#endif -+ -+ return !!status; -+} -diff -ruN elpa-2020.11.001/examples/Fortran/assert.h elpa-2020.11.001_ok/examples/Fortran/assert.h ---- elpa-2020.11.001/examples/Fortran/assert.h 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/assert.h 2021-02-02 12:54:41.845532000 +0100 -@@ -0,0 +1,7 @@ -+#define stringify_(x) "x" -+#define stringify(x) stringify_(x) -+#define assert(x) call x_a(x, stringify(x), "F", __LINE__) -+ -+#define assert_elpa_ok(error_code) call x_ao(error_code, stringify(error_code), __FILE__, __LINE__) -+ -+! vim: syntax=fortran -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/complex_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/complex_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/complex_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/complex_2stage_banded.F90 2021-02-02 12:54:41.866126000 +0100 -@@ -0,0 +1,295 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 complex case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". 
-+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The complex ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "COMPLEX_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+program test_complex2_double_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - COMPLEX version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! 
nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+#ifdef WITH_MPI -+ !TEST_INT_TYPE, external :: numroc -+#endif -+ complex(kind=ck8), parameter :: CZERO = (0.0_rk8,0.0_rk8), CONE = (1.0_rk8,0.0_rk8) -+ real(kind=rk8), allocatable :: ev(:) -+ -+ complex(kind=ck8), allocatable :: a(:,:), z(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ type(output_t) :: write_to_file -+ integer(kind=c_int) :: error_elpa -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ -+ -+ TEST_INT_TYPE :: numberOfDevices -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+ -+#define COMPLEXCASE -+#define DOUBLE_PRECISION_COMPLEX 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - COMPLEX version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ ! Determine the necessary size of the distributed matrices, -+ ! we use the Scalapack tools routine NUMROC for that. -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ !------------------------------------------------------------------------------- -+ ! 
Allocate matrices and set up a test matrix for the eigenvalue problem -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0 -+ as(local_row, local_col) = 0 -+ end if -+ end do -+ end do -+ -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/double_instance.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/double_instance.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/double_instance.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/double_instance.F90 2021-02-02 12:54:41.866517000 +0100 -@@ -0,0 +1,244 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! 
- Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "../assert.h" -+ -+program test_interface -+ use elpa -+ -+ use precision_for_tests -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ real(kind=C_DOUBLE), allocatable :: a1(:,:), as1(:,:) -+ ! eigenvectors -+ real(kind=C_DOUBLE), allocatable :: z1(:,:) -+ ! eigenvalues -+ real(kind=C_DOUBLE), allocatable :: ev1(:) -+ -+ ! The Matrix -+ complex(kind=C_DOUBLE_COMPLEX), allocatable :: a2(:,:), as2(:,:) -+ ! eigenvectors -+ complex(kind=C_DOUBLE_COMPLEX), allocatable :: z2(:,:) -+ ! 
eigenvalues -+ real(kind=C_DOUBLE), allocatable :: ev2(:) -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ TEST_INT_TYPE :: solver -+ TEST_INT_TYPE :: qr -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e1, e2 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+ -+ status = 0 -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ -+ np_rows = nprocs/np_cols -+ -+ my_prow = mod(myid, np_cols) -+ my_pcol = myid / np_cols -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a1 (na_rows,na_cols), as1(na_rows,na_cols)) -+ allocate(z1 (na_rows,na_cols)) -+ allocate(ev1(na)) -+ -+ a1(:,:) = 0.0 -+ z1(:,:) = 0.0 -+ ev1(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a1, z1, as1) -+ allocate(a2 (na_rows,na_cols), as2(na_rows,na_cols)) -+ allocate(z2 (na_rows,na_cols)) -+ allocate(ev2(na)) -+ -+ a2(:,:) = 0.0 -+ z2(:,:) = 0.0 -+ ev2(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a2, z2, as2) -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e1 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e1%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e1%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ assert(e1%setup() .eq. ELPA_OK) -+ -+ call e1%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("real_kernel", ELPA_2STAGE_REAL_DEFAULT, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ e2 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e2%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e2%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ assert(e2%setup() .eq. 
ELPA_OK) -+ -+ call e2%set("solver", ELPA_SOLVER_1STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%eigenvectors(a1, ev1, z1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e2%eigenvectors(a2, ev2, z2, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e2, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as1, z1, ev1, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a1) -+ deallocate(as1) -+ deallocate(z1) -+ deallocate(ev1) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as2, z2, ev2, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a2) -+ deallocate(as2) -+ deallocate(z2) -+ deallocate(ev2) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+ -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/real_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/real_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/real_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/real_2stage_banded.F90 2021-02-02 12:54:41.866398000 +0100 -@@ -0,0 +1,294 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". 
-+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The real ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "REAL_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+program test_real2_double_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+ !TEST_INT_TYPE, external :: numroc -+ -+ real(kind=rk8), allocatable :: a(:,:), z(:,:), as(:,:), ev(:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ integer(kind=c_int) :: error_elpa -+ TEST_INT_TYPE :: numberOfDevices -+ type(output_t) :: write_to_file -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+#define DOUBLE_PRECISION_REAL 1 -+ -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ -+ !------------------------------------------------------------------------------- -+ ! 
MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+#define REALCASE -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - REAL version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! 
set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g(local_row, nblk, my_prow, np_rows) -+ do local_col = 1, na_cols -+ global_col = index_l2g(local_col, nblk, my_pcol, np_cols) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0.0 -+ as(local_row, local_col) = 0.0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/single_complex_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/single_complex_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/single_complex_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/single_complex_2stage_banded.F90 2021-02-02 12:54:41.866260000 +0100 -@@ -0,0 +1,295 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! 
http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 complex case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The complex ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "COMPLEX_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+program test_complex2_single_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - COMPLEX version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+!------------------------------------------------------------------------------- -+ use elpa -+ -+ use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ -+ use test_output_type -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! 
nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+#ifdef WITH_MPI -+ !TEST_INT_TYPE, external :: numroc -+#endif -+ complex(kind=ck4), parameter :: CZERO = (0.0_rk4,0.0_rk4), CONE = (1.0_rk4,0.0_rk4) -+ real(kind=rk4), allocatable :: ev(:) -+ -+ complex(kind=ck4), allocatable :: a(:,:), z(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ type(output_t) :: write_to_file -+ integer(kind=ik) :: error_elpa -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ -+ -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+ -+#define COMPLEXCASE -+#define DOUBLE_PRECISION_COMPLEX 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ STATUS = 0 -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - COMPLEX version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ ! Determine the necessary size of the distributed matrices, -+ ! we use the Scalapack tools routine NUMROC for that. -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ !------------------------------------------------------------------------------- -+ ! 
Allocate matrices and set up a test matrix for the eigenvalue problem -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0 -+ as(local_row, local_col) = 0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa2/single_real_2stage_banded.F90 elpa-2020.11.001_ok/examples/Fortran/elpa2/single_real_2stage_banded.F90 ---- elpa-2020.11.001/examples/Fortran/elpa2/single_real_2stage_banded.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa2/single_real_2stage_banded.F90 2021-02-02 12:54:41.866634000 +0100 -@@ -0,0 +1,287 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. 
Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "../assert.h" -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (500, 150, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+!> The real ELPA 2 kernel is set as the default kernel. -+!> However, this can be overriden by setting -+!> the environment variable "REAL_ELPA_KERNEL" to an -+!> appropiate value. -+!> -+program test_real2_single_banded -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! 
-+!------------------------------------------------------------------------------- -+ use elpa -+ -+ !use test_util -+ use test_read_input_parameters -+ use test_check_correctness -+ use test_setup_mpi -+ use test_blacs_infrastructure -+ use test_prepare_matrix -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ use test_output_type -+ use tests_scalapack_interfaces -+ implicit none -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ TEST_INT_TYPE :: nblk -+ TEST_INT_TYPE :: na, nev -+ -+ TEST_INT_TYPE :: np_rows, np_cols, na_rows, na_cols -+ -+ TEST_INT_TYPE :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ TEST_INT_TYPE :: i, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ real(kind=rk4), allocatable :: a(:,:), z(:,:), as(:,:), ev(:) -+ -+ TEST_INT_TYPE :: STATUS -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_TYPE :: omp_get_max_threads, required_mpi_thread_level, provided_mpi_thread_level -+#endif -+ integer(kind=c_int) :: error_elpa -+ type(output_t) :: write_to_file -+ character(len=8) :: task_suffix -+ TEST_INT_TYPE :: j -+ TEST_INT_TYPE :: global_row, global_col, local_row, local_col -+ TEST_INT_TYPE :: bandwidth -+ class(elpa_t), pointer :: e -+#define DOUBLE_PRECISION_REAL 1 -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ -+ !------------------------------------------------------------------------------- -+ ! MPI Initialization -+ call setup_mpi(myid, nprocs) -+ -+ -+ STATUS = 0 -+ -+#define REALCASE -+ -+ !------------------------------------------------------------------------------- -+ ! Selection of number of processor rows/columns -+ ! We try to set up the grid square-like, i.e. start the search for possible -+ ! divisors of nprocs with a number next to the square root of nprocs -+ ! and decrement it until a divisor is found. -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ if(myid==0) then -+ print * -+ print '(a)','Standard eigenvalue problem - REAL version' -+ print * -+ print '(3(a,i0))','Matrix size=',na,', Number of eigenvectors=',nev,', Block size=',nblk -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print * -+ endif -+ -+ !------------------------------------------------------------------------------- -+ ! Set up BLACS context and MPI communicators -+ ! -+ ! The BLACS context is only necessary for using Scalapack. -+ ! -+ ! For ELPA, the MPI communicators along rows/cols are sufficient, -+ ! and the grid setup may be done in an arbitrary way as long as it is -+ ! consistent (i.e. 0<=my_prow<np_rows, 0<=my_pcol<np_cols and every -+ ! process has a unique (my_prow,my_pcol) pair). -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, 'C', & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ -+ call set_up_blacs_descriptor(na ,nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' 
-+ end if -+ -+ !------------------------------------------------------------------------------- -+ ! Allocate matrices and set up a test matrix for the eigenvalue problem -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ -+ ! set values outside of the bandwidth to zero -+ bandwidth = nblk -+ -+ do local_row = 1, na_rows -+ global_row = index_l2g( local_row, nblk, my_prow, np_rows ) -+ do local_col = 1, na_cols -+ global_col = index_l2g( local_col, nblk, my_pcol, np_cols ) -+ -+ if (ABS(global_row-global_col) > bandwidth) then -+ a(local_row, local_col) = 0.0 -+ as(local_row, local_col) = 0.0 -+ end if -+ end do -+ end do -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ call e%set("bandwidth", int(bandwidth,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert(e%setup() .eq. ELPA_OK) -+ -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ !------------------------------------------------------------------------------- -+ ! Test correctness of result (using plain scalapack routines) -+ -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ deallocate(a) -+ deallocate(as) -+ -+ deallocate(z) -+ deallocate(ev) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call EXIT(STATUS) -+end -+ -+!------------------------------------------------------------------------------- -diff -ruN elpa-2020.11.001/examples/Fortran/elpa_print_headers.F90 elpa-2020.11.001_ok/examples/Fortran/elpa_print_headers.F90 ---- elpa-2020.11.001/examples/Fortran/elpa_print_headers.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/elpa_print_headers.F90 2021-02-02 12:54:41.858363000 +0100 -@@ -0,0 +1,273 @@ -+#if 0 -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! 
Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+! ELPA1 -- Faster replacements for ScaLAPACK symmetric eigenvalue routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+#endif -+ -+#ifdef WITH_OPENMP_TRADITIONAL -+ if (myid .eq. 0) then -+ print *,"Threaded version of test program" -+ print *,"Using ",omp_get_max_threads()," threads" -+ print *," " -+ endif -+#endif -+ -+#ifndef WITH_MPI -+ if (myid .eq. 0) then -+ print *,"This version of ELPA does not support MPI parallelisation" -+ print *,"For MPI support re-build ELPA with appropiate flags" -+ print *," " -+ endif -+#endif -+ -+#ifdef ELPA1 -+ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION_REAL -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued double-precision version of ELPA1 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued single-precision version of ELPA1 is used" -+ print *," " -+ endif -+#endif -+ -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued double-precision version of ELPA1 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued single-precision version of ELPA1 is used" -+ print *," " -+ endif -+#endif -+ -+#endif /* DATATYPE */ -+ -+#else /* ELPA1 */ -+ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION_REAL -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued double-precision version of ELPA2 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Real valued single-precision version of ELPA2 is used" -+ print *," " -+ endif -+#endif -+ -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Complex valued double-precision version of ELPA2 is used" -+ print *," " -+ endif -+#else -+ if (myid .eq. 
0) then -+ print *," " -+ print *,"Complex valued single-precision version of ELPA2 is used" -+ print *," " -+ endif -+#endif -+ -+#endif /* DATATYPE */ -+ -+#endif /* ELPA1 */ -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+#ifdef HAVE_REDIRECT -+ if (check_redirect_environment_variable()) then -+ if (myid .eq. 0) then -+ print *," " -+ print *,"Redirection of mpi processes is used" -+ print *," " -+ if (create_directories() .ne. 1) then -+ write(error_unit,*) "Unable to create directory for stdout and stderr!" -+ stop 1 -+ endif -+ endif -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call redirect_stdout(myid) -+ endif -+#endif -+ -+#ifndef ELPA1 -+ -+ if (myid .eq. 0) then -+ print *," " -+ print *,"This ELPA2 is build with" -+#ifdef WITH_GPU_KERNEL -+ print *,"GPU support" -+#endif -+ print *," " -+#ifdef REALCASE -+ -+#ifdef HAVE_AVX2 -+ -+#ifdef WITH_REAL_AVX_BLOCK2_KERNEL -+ print *,"AVX2 optimized kernel (2 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK4_KERNEL -+ print *,"AVX2 optimized kernel (4 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK6_KERNEL -+ print *,"AVX2 optimized kernel (6 blocking) for real matrices" -+#endif -+ -+#else /* no HAVE_AVX2 */ -+ -+#ifdef HAVE_AVX -+ -+#ifdef WITH_REAL_AVX_BLOCK2_KERNEL -+ print *,"AVX optimized kernel (2 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK4_KERNEL -+ print *,"AVX optimized kernel (4 blocking) for real matrices" -+#endif -+#ifdef WITH_REAL_AVX_BLOCK6_KERNEL -+ print *,"AVX optimized kernel (6 blocking) for real matrices" -+#endif -+ -+#endif -+ -+#endif /* HAVE_AVX2 */ -+ -+ -+#ifdef WITH_REAL_GENERIC_KERNEL -+ print *,"GENERIC kernel for real matrices" -+#endif -+#ifdef WITH_REAL_GENERIC_SIMPLE_KERNEL -+ print *,"GENERIC SIMPLE kernel for real matrices" -+#endif -+#ifdef WITH_REAL_SSE_ASSEMBLY_KERNEL -+ print *,"SSE ASSEMBLER kernel for real matrices" -+#endif -+#ifdef WITH_REAL_BGP_KERNEL -+ print *,"BGP kernel for real matrices" -+#endif -+#ifdef WITH_REAL_BGQ_KERNEL -+ print *,"BGQ kernel for real matrices" -+#endif -+ -+#endif /* DATATYPE == REAL */ -+ -+#ifdef COMPLEXCASE -+ -+#ifdef HAVE_AVX2 -+ -+#ifdef WITH_COMPLEX_AVX_BLOCK2_KERNEL -+ print *,"AVX2 optimized kernel (2 blocking) for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_AVX_BLOCK1_KERNEL -+ print *,"AVX2 optimized kernel (1 blocking) for complex matrices" -+#endif -+ -+#else /* no HAVE_AVX2 */ -+ -+#ifdef HAVE_AVX -+ -+#ifdef WITH_COMPLEX_AVX_BLOCK2_KERNEL -+ print *,"AVX optimized kernel (2 blocking) for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_AVX_BLOCK1_KERNEL -+ print *,"AVX optimized kernel (1 blocking) for complex matrices" -+#endif -+ -+#endif -+ -+#endif /* HAVE_AVX2 */ -+ -+ -+#ifdef WITH_COMPLEX_GENERIC_KERNEL -+ print *,"GENERIC kernel for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_GENERIC_SIMPLE_KERNEL -+ print *,"GENERIC SIMPLE kernel for complex matrices" -+#endif -+#ifdef WITH_COMPLEX_SSE_ASSEMBLY_KERNEL -+ print *,"SSE ASSEMBLER kernel for complex matrices" -+#endif -+ -+#endif /* DATATYPE == COMPLEX */ -+ -+ endif -+#endif /* ELPA1 */ -+ -+ if (write_to_file%eigenvectors) then -+ if (myid .eq. 0) print *,"Writing Eigenvectors to files" -+ endif -+ -+ if (write_to_file%eigenvalues) then -+ if (myid .eq. 
0) print *,"Writing Eigenvalues to files" -+ endif -+ -+ -diff -ruN elpa-2020.11.001/examples/Fortran/Makefile_examples_hybrid elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_hybrid ---- elpa-2020.11.001/examples/Fortran/Makefile_examples_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_hybrid 2021-07-02 10:49:17.191984000 +0200 -@@ -0,0 +1,38 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_MODULES) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+# GCC -+# F90 = mpif90 -O3 -fopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_MODULES) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+LIBS = -L$(ELPA_LIB) -lelpatest_openmp -lelpa_openmp $(SCALAPACK_LIB) $(MKL) -+# CC = mpicc -qopenmp -O3 -+# GCC -+# CC = mpicc -fopenmp -O3 -+ -+all: test_real_1stage_omp test_real_2stage_all_kernels_omp test_autotune_omp test_multiple_objs_omp test_split_comm_omp test_skewsymmetric_omp -+ -+test_real_1stage_omp: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP)/elpa -o test_real_1stage_omp.F90 test.F90 -+ $(F90) -o $@ test_real_1stage_omp.F90 $(LIBS) -+ -+test_real_2stage_all_kernels_omp: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -DWITH_MPI -I$(ELPA_INCLUDE_OPENMP)/elpa -o test_real_2stage_all_kernels_omp.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels_omp.F90 $(LIBS) -+ -+test_autotune_omp: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_autotune.F90 $(LIBS) -+ -+test_multiple_objs_omp: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm_omp: test_split_comm.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_split_comm.F90 $(LIBS) -+ -+test_skewsymmetric_omp: test_skewsymmetric.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DWITH_OPENMP -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE_OPENMP)/elpa -o $@ test_skewsymmetric.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Fortran/Makefile_examples_pure elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure ---- elpa-2020.11.001/examples/Fortran/Makefile_examples_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure 2021-07-02 10:53:24.225432000 +0200 -@@ -0,0 +1,34 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC 
-+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+# CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs test_split_comm test_skewsymmetric -+ -+test_real_1stage: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_1stage.F90 test.F90 -+ $(F90) -o $@ test_real_1stage.F90 $(LIBS) -+ -+test_real_2stage_all_kernels: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=0 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_2stage_all_kernels.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels.F90 $(LIBS) -+ -+test_autotune: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.F90 $(LIBS) -+ -+test_multiple_objs: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm: test_split_comm.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_split_comm.F90 $(LIBS) -+ -+test_skewsymmetric: test_skewsymmetric.F90 -+ $(F90) -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_skewsymmetric.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Fortran/Makefile_examples_pure_cuda elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure_cuda ---- elpa-2020.11.001/examples/Fortran/Makefile_examples_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/Makefile_examples_pure_cuda 2021-07-02 10:53:52.066370000 +0200 -@@ -0,0 +1,34 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcudart -+# CC = mpicc -O3 -+ -+all: test_real_1stage test_real_2stage_all_kernels test_autotune test_multiple_objs test_split_comm test_skewsymmetric -+ -+test_real_1stage: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_1STAGE -DTEST_EIGENVECTORS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_1stage.F90 test.F90 -+ $(F90) -o $@ test_real_1stage.F90 $(LIBS) -+ -+test_real_2stage_all_kernels: test.F90 -+ /usr/bin/cpp -P -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DTEST_SOLVER_2STAGE -DTEST_EIGENVECTORS -DTEST_ALL_KERNELS -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o test_real_2stage_all_kernels.F90 test.F90 -+ $(F90) -o $@ test_real_2stage_all_kernels.F90 $(LIBS) -+ -+test_autotune: test_autotune.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_autotune.F90 
$(LIBS) -+ -+test_multiple_objs: test_multiple_objs.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_multiple_objs.F90 $(LIBS) -+ -+test_split_comm: test_split_comm.F90 -+ $(F90) -DTEST_GPU=1 -DTEST_REAL -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_split_comm.F90 $(LIBS) -+ -+test_skewsymmetric: test_skewsymmetric.F90 -+ $(F90) -DTEST_REAL -DTEST_GPU=1 -DTEST_DOUBLE -DWITH_MPI -DCURRENT_API_VERSION=20190524 -I$(ELPA_INCLUDE)/elpa -o $@ test_skewsymmetric.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Fortran/test_autotune.F90 elpa-2020.11.001_ok/examples/Fortran/test_autotune.F90 ---- elpa-2020.11.001/examples/Fortran/test_autotune.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_autotune.F90 2021-02-02 12:54:41.866864000 +0100 -@@ -0,0 +1,312 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! 
Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+ class(elpa_autotune_t), pointer :: tune_state -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ as(:,:) = a(:,:) -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (layout .eq. 
'C') then -+ call e%set("matrix_order",COLUMN_MAJOR_ORDER,error_elpa) -+ else -+ call e%set("matrix_order",ROW_MAJOR_ORDER,error_elpa) -+ endif -+ -+#ifdef WITH_MPI -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ call e%set("timings",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("debug",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("gpu", 0, error_elpa) -+ assert_elpa_ok(error_elpa) -+ !call e%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e%setup()) -+ -+ if (myid == 0) print *, "" -+ -+ tune_state => e%autotune_setup(ELPA_AUTOTUNE_FAST, AUTOTUNE_DOMAIN, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ iter=0 -+ do while (e%autotune_step(tune_state, error_elpa)) -+ assert_elpa_ok(error_elpa) -+ iter=iter+1 -+ write(iter_string,'(I5.5)') iter -+ !call e%print_settings() -+ !call e%store_settings("saved_parameters_"//trim(iter_string)//".txt") -+ call e%timer_start("eigenvectors: iteration "//trim(iter_string)) -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("eigenvectors: iteration "//trim(iter_string)) -+ -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e%print_times("eigenvectors: iteration "//trim(iter_string)) -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ a(:,:) = as(:,:) -+ !call e%autotune_print_state(tune_state) -+ !call e%autotune_save_state(tune_state, "saved_state_"//trim(iter_string)//".txt") -+ end do -+ -+ ! set and print the autotuned-settings -+ call e%autotune_set_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "The best combination found by the autotuning:" -+ flush(output_unit) -+ call e%autotune_print_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ endif -+ ! de-allocate autotune object -+ call elpa_autotune_deallocate(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "Running once more time with the best found setting..." -+ endif -+ call e%timer_start("eigenvectors: best setting") -+ call e%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("eigenvectors: best setting") -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e%print_times("eigenvectors: best setting") -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ -+ call elpa_deallocate(e,error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test.F90 elpa-2020.11.001_ok/examples/Fortran/test.F90 ---- elpa-2020.11.001/examples/Fortran/test.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test.F90 2021-02-02 12:54:41.862023000 +0100 -@@ -0,0 +1,926 @@ -+! This file is part of ELPA. -+! -+! 
The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! 
Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#if !(defined(TEST_SOLVER_1STAGE) ^ defined(TEST_SOLVER_2STAGE) ^ defined(TEST_SCALAPACK_ALL) ^ defined(TEST_SCALAPACK_PART)) -+error: define exactly one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE or TEST_SCALAPACK_ALL or TEST_SCALAPACK_PART -+#endif -+ -+#ifdef TEST_SOLVER_1STAGE -+#ifdef TEST_ALL_KERNELS -+error: TEST_ALL_KERNELS cannot be defined for TEST_SOLVER_1STAGE -+#endif -+#ifdef TEST_KERNEL -+error: TEST_KERNEL cannot be defined for TEST_SOLVER_1STAGE -+#endif -+#endif -+ -+#ifdef TEST_SOLVER_2STAGE -+#if !(defined(TEST_KERNEL) ^ defined(TEST_ALL_KERNELS)) -+error: define either TEST_ALL_KERNELS or a valid TEST_KERNEL -+#endif -+#endif -+ -+#ifdef TEST_GENERALIZED_DECOMP_EIGENPROBLEM -+#define TEST_GENERALIZED_EIGENPROBLEM -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+#ifdef TEST_REAL -+#define KERNEL_KEY "real_kernel" -+#endif -+#ifdef TEST_COMPLEX -+#define KERNEL_KEY "complex_kernel" -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+#ifdef WITH_SCALAPACK_TESTS -+ use test_scalapack -+#endif -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+#ifdef WITH_OPENMP -+ use omp_lib -+#endif -+ use precision_for_tests -+ -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_MPI_TYPE :: myidMPI, nprocsMPI -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ MATRIX_TYPE, allocatable :: b(:,:), c(:,:) -+#endif -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ MATRIX_TYPE, allocatable :: b(:,:), bs(:,:) -+#endif -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ logical :: check_all_evals, skip_check_correctness -+ -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ EV_TYPE, allocatable :: d(:), sd(:), ds(:), sds(:) -+ EV_TYPE :: diagonalELement, subdiagonalElement -+#endif -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+#ifdef TEST_ALL_KERNELS -+ TEST_INT_TYPE :: i -+#endif -+#ifdef TEST_ALL_LAYOUTS -+ character(len=1), parameter :: layouts(2) = [ 'C', 'R' ] -+ TEST_INT_TYPE :: i_layout -+#endif -+ integer(kind=c_int):: kernel -+ character(len=1) :: layout -+ logical :: do_test_numeric_residual, do_test_numeric_residual_generalized, & -+ do_test_analytic_eigenvalues, & -+ do_test_analytic_eigenvalues_eigenvectors, & -+ do_test_frank_eigenvalues, & -+ do_test_toeplitz_eigenvalues, do_test_cholesky, & -+ do_test_hermitian_multiply -+ logical :: ignoreError -+#ifdef WITH_OPENMP -+ TEST_INT_TYPE :: max_threads, threads_caller -+#endif -+ -+#ifdef SPLIT_COMM_MYSELF -+ TEST_INT_MPI_TYPE :: mpi_comm_rows, mpi_comm_cols, mpi_string_length, mpierr2 -+ character(len=MPI_MAX_ERROR_STRING) :: mpierr_string -+#endif -+ -+ ignoreError = .false. -+ -+ call read_input_parameters_traditional(na, nev, nblk, write_to_file, skip_check_correctness) -+ call setup_mpi(myid, nprocs) -+ -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ check_all_evals = .true. -+ -+ -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .false. -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ -+ do_test_cholesky = .false. -+#if defined(TEST_CHOLESKY) -+ do_test_cholesky = .true. -+#endif -+ do_test_hermitian_multiply = .false. -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ do_test_hermitian_multiply = .true. -+#endif -+ -+ status = 0 -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Program ' // TEST_CASE -+ print *, "" -+ endif -+ -+#ifdef TEST_ALL_LAYOUTS -+ do i_layout = 1, size(layouts) ! layouts -+ layout = layouts(i_layout) -+ do np_cols = 1, nprocs ! factors -+ if (mod(nprocs,np_cols) /= 0 ) then -+ cycle -+ endif -+#else -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+#endif -+ -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+#if TEST_QR_DECOMPOSITION == 1 -+ -+#if TEST_GPU == 1 -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+#endif /* TEST_GPU */ -+ if (nblk .lt. 64) then -+ if (myid .eq. 0) then -+ print *,"At the moment QR decomposition need blocksize of at least 64" -+ endif -+ if ((na .lt. 64) .and. (myid .eq. 
0)) then -+ print *,"This is why the matrix size must also be at least 64 or only 1 MPI task can be used" -+ endif -+ -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+ endif -+#endif /* TEST_QR_DECOMPOSITION */ -+ -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, & -+ np_cols, layout, my_blacs_ctxt, my_prow, & -+ my_pcol) -+ -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) && defined(TEST_ALL_LAYOUTS) -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+#endif -+ -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+ allocate(b (na_rows,na_cols)) -+ allocate(c (na_rows,na_cols)) -+#endif -+ -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ allocate(b (na_rows,na_cols)) -+ allocate(bs (na_rows,na_cols)) -+#endif -+ -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ allocate(d (na), ds(na)) -+ allocate(sd (na), sds(na)) -+#endif -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+#if defined(TEST_MATRIX_RANDOM) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) && !defined(TEST_EIGENVALUES) -+ ! the random matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! RANDOM + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! RANDOM + TEST_CHOLESKY: wee need SPD matrix -+ ! RANDOM + TEST_EIGENVALUES: no correctness check known -+ -+ ! We also have to take care of special case in TEST_EIGENVECTORS -+#if !defined(TEST_EIGENVECTORS) -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+#else /* TEST_EIGENVECTORS */ -+ if (nev .ge. 1) then -+ call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_numeric_residual = .true. -+#endif -+ else -+ if (myid .eq. 0) then -+ print *,"At the moment with the random matrix you need nev >=1" -+ endif -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ stop 77 -+ endif -+#endif /* TEST_EIGENVECTORS */ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* (TEST_MATRIX_RANDOM) */ -+ -+#if defined(TEST_MATRIX_RANDOM) && defined(TEST_CHOLESKY) -+ call prepare_matrix_random_spd(na, myid, sc_desc, a, z, as, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* TEST_MATRIX_RANDOM and TEST_CHOLESKY */ -+ -+#if defined(TEST_MATRIX_RANDOM) && defined(TEST_GENERALIZED_EIGENPROBLEM) -+ ! call prepare_matrix_random(na, myid, sc_desc, a, z, as) -+ call prepare_matrix_random_spd(na, myid, sc_desc, b, z, bs, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .true. 
-+#endif /* TEST_MATRIX_RANDOM and TEST_GENERALIZED_EIGENPROBLEM */ -+ -+#if defined(TEST_MATRIX_RANDOM) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_EIGENVALUES)) -+#error "Random matrix is not allowed in this configuration" -+#endif -+ -+#if defined(TEST_MATRIX_ANALYTIC) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) -+ ! the analytic matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! ANALYTIC + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! ANALTIC + TEST_CHOLESKY: no correctness check yet implemented -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ as(:,:) = a -+ -+ do_test_numeric_residual = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_analytic_eigenvalues = .true. -+#endif -+#if defined(TEST_EIGENVECTORS) -+ if (nev .ge. 1) then -+ do_test_analytic_eigenvalues_eigenvectors = .true. -+ do_test_analytic_eigenvalues = .false. -+ else -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ endif -+#endif -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+#endif /* TEST_MATRIX_ANALYTIC */ -+#if defined(TEST_MATRIX_ANALYTIC) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_CHOLESKY)) -+#error "Analytic matrix is not allowd in this configuration" -+#endif -+ -+#if defined(TEST_MATRIX_TOEPLITZ) -+ ! The Toeplitz matrix works in each test -+#ifdef TEST_SINGLE -+ diagonalElement = 0.45_c_float -+ subdiagonalElement = 0.78_c_float -+#else -+ diagonalElement = 0.45_c_double -+ subdiagonalElement = 0.78_c_double -+#endif -+ -+! actually we test cholesky for diagonal matrix only -+#if defined(TEST_CHOLESKY) -+#ifdef TEST_SINGLE -+ diagonalElement = (2.546_c_float, 0.0_c_float) -+ subdiagonalElement = (0.0_c_float, 0.0_c_float) -+#else -+ diagonalElement = (2.546_c_double, 0.0_c_double) -+ subdiagonalElement = (0.0_c_double, 0.0_c_double) -+#endif -+#endif /* TEST_CHOLESKY */ -+ -+ call prepare_matrix_toeplitz(na, diagonalElement, subdiagonalElement, & -+ d, sd, ds, sds, a, as, nblk, np_rows, & -+ np_cols, my_prow, my_pcol) -+ -+ -+ do_test_numeric_residual = .false. -+#if defined(TEST_EIGENVECTORS) -+ if (nev .ge. 1) then -+ do_test_numeric_residual = .true. -+ else -+ do_test_numeric_residual = .false. -+ endif -+#endif -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+#if defined(TEST_CHOLESKY) -+ do_test_toeplitz_eigenvalues = .false. -+#else -+ do_test_toeplitz_eigenvalues = .true. -+#endif -+ -+#endif /* TEST_MATRIX_TOEPLITZ */ -+ -+ -+#if defined(TEST_MATRIX_FRANK) && !defined(TEST_SOLVE_TRIDIAGONAL) && !defined(TEST_CHOLESKY) -+ ! the random matrix can be used in allmost all tests; but for some no -+ ! correctness checks have been implemented; do not allow these -+ ! combinations -+ ! FRANK + TEST_SOLVE_TRIDIAGONAL: we need a TOEPLITZ MATRIX -+ ! FRANK + TEST_CHOLESKY: no correctness check yet implemented -+ -+ ! We also have to take care of special case in TEST_EIGENVECTORS -+#if !defined(TEST_EIGENVECTORS) -+ call prepare_matrix_frank(na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. 
-+ -+#else /* TEST_EIGENVECTORS */ -+ -+ if (nev .ge. 1) then -+ call prepare_matrix_frank(na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ else -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+#ifndef TEST_HERMITIAN_MULTIPLY -+ do_test_frank_eigenvalues = .true. -+#endif -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_numeric_residual = .false. -+ -+ endif -+ -+#endif /* TEST_EIGENVECTORS */ -+#endif /* (TEST_MATRIX_FRANK) */ -+#if defined(TEST_MATRIX_FRANK) && (defined(TEST_SOLVE_TRIDIAGONAL) || defined(TEST_CHOLESKY)) -+#error "FRANK matrix is not allowed in this configuration" -+#endif -+ -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+#ifdef TEST_REAL -+ -+#ifdef TEST_DOUBLE -+ b(:,:) = 2.0_c_double * a(:,:) -+ c(:,:) = 0.0_c_double -+#else -+ b(:,:) = 2.0_c_float * a(:,:) -+ c(:,:) = 0.0_c_float -+#endif -+ -+#endif /* TEST_REAL */ -+ -+#ifdef TEST_COMPLEX -+ -+#ifdef TEST_DOUBLE -+ b(:,:) = 2.0_c_double * a(:,:) -+ c(:,:) = (0.0_c_double, 0.0_c_double) -+#else -+ b(:,:) = 2.0_c_float * a(:,:) -+ c(:,:) = (0.0_c_float, 0.0_c_float) -+#endif -+ -+#endif /* TEST_COMPLEX */ -+ -+#endif /* TEST_HERMITIAN_MULTIPLY */ -+ -+! if the test is used for (repeated) performacne tests, one might want to skip the checking -+! of the results, which might be time-consuming and not necessary. -+ if(skip_check_correctness) then -+ do_test_numeric_residual = .false. -+ do_test_numeric_residual_generalized = .false. -+ do_test_analytic_eigenvalues = .false. -+ do_test_analytic_eigenvalues_eigenvectors = .false. -+ do_test_frank_eigenvalues = .false. -+ do_test_toeplitz_eigenvalues = .false. -+ do_test_cholesky = .false. -+ endif -+ -+ -+#ifdef WITH_OPENMP -+ threads_caller = omp_get_max_threads() -+ if (myid == 0) then -+ print *,"The calling program uses ",threads_caller," threads" -+ endif -+#endif -+ -+ e => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (layout .eq. 'C') then -+ call e%set("matrix_order",COLUMN_MAJOR_ORDER,error_elpa) -+ else -+ call e%set("matrix_order",ROW_MAJOR_ORDER,error_elpa) -+ endif -+ -+#ifdef WITH_MPI -+#ifdef SPLIT_COMM_MYSELF -+ call mpi_comm_split(MPI_COMM_WORLD, int(my_pcol,kind=MPI_KIND), int(my_prow,kind=MPI_KIND), & -+ mpi_comm_rows, mpierr) -+ if (mpierr .ne. MPI_SUCCESS) then -+ call MPI_ERROR_STRING(mpierr, mpierr_string, mpi_string_length, mpierr2) -+ write(error_unit,*) "MPI ERROR occured during mpi_comm_split for row communicator: ", trim(mpierr_string) -+ stop 1 -+ endif -+ -+ call mpi_comm_split(MPI_COMM_WORLD, int(my_prow,kind=MPI_KIND), int(my_pcol,kind=MPI_KIND), & -+ mpi_comm_cols, mpierr) -+ if (mpierr .ne. 
MPI_SUCCESS) then -+ call MPI_ERROR_STRING(mpierr,mpierr_string, mpi_string_length, mpierr2) -+ write(error_unit,*) "MPI ERROR occured during mpi_comm_split for col communicator: ", trim(mpierr_string) -+ stop 1 -+ endif -+ -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("mpi_comm_rows", int(mpi_comm_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("mpi_comm_cols", int(mpi_comm_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#else -+ call e%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ call e%set("blacs_context", int(my_blacs_ctxt,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ call e%set("timings", 1_ik, error_elpa) -+ assert_elpa_ok(e%setup()) -+ -+#ifdef TEST_SOLVER_1STAGE -+ call e%set("solver", ELPA_SOLVER_1STAGE, error_elpa) -+#else -+ call e%set("solver", ELPA_SOLVER_2STAGE, error_elpa) -+#endif -+ assert_elpa_ok(error_elpa) -+ -+ call e%set("gpu", TEST_GPU, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%set("qr", 1_ik, error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+#ifdef WITH_OPENMP -+ max_threads=omp_get_max_threads() -+ call e%set("omp_threads", int(max_threads,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ -+ if (myid == 0) print *, "" -+ -+#ifdef TEST_ALL_KERNELS -+ do i = 0, elpa_option_cardinality(KERNEL_KEY) ! kernels -+ if (TEST_GPU .eq. 0) then -+ kernel = elpa_option_enumerate(KERNEL_KEY, int(i,kind=c_int)) -+ if (kernel .eq. ELPA_2STAGE_REAL_GPU) continue -+ if (kernel .eq. ELPA_2STAGE_COMPLEX_GPU) continue -+ endif -+#endif -+#ifdef TEST_KERNEL -+ kernel = TEST_KERNEL -+#endif -+ -+#ifdef TEST_SOLVER_2STAGE -+#if TEST_GPU == 1 -+#if defined TEST_REAL -+ kernel = ELPA_2STAGE_REAL_GPU -+#endif -+#if defined TEST_COMPLEX -+ kernel = ELPA_2STAGE_COMPLEX_GPU -+#endif -+#endif -+ call e%set(KERNEL_KEY, kernel, error_elpa) -+#ifdef TEST_KERNEL -+ assert_elpa_ok(error_elpa) -+#else -+ if (error_elpa /= ELPA_OK) then -+ cycle -+ endif -+ ! actually used kernel might be different if forced via environment variables -+ call e%get(KERNEL_KEY, kernel, error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ if (myid == 0) then -+ print *, elpa_int_value_to_string(KERNEL_KEY, kernel) // " kernel" -+ endif -+#endif -+ -+ -+! print all parameters -+ call e%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef TEST_ALL_KERNELS -+ call e%timer_start(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#endif -+ -+ ! The actual solve step -+#if defined(TEST_EIGENVECTORS) -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%timer_start("e%eigenvectors_qr()") -+#else -+ call e%timer_start("e%eigenvectors()") -+#endif -+#ifdef TEST_SCALAPACK_ALL -+ call solve_scalapack_all(na, a, sc_desc, ev, z) -+#elif TEST_SCALAPACK_PART -+ call solve_scalapack_part(na, a, sc_desc, nev, ev, z) -+ check_all_evals = .false. ! 
scalapack does not compute all eigenvectors -+#else -+ call e%eigenvectors(a, ev, z, error_elpa) -+#endif -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%timer_stop("e%eigenvectors_qr()") -+#else -+ call e%timer_stop("e%eigenvectors()") -+#endif -+#endif /* TEST_EIGENVECTORS */ -+ -+#ifdef TEST_EIGENVALUES -+ call e%timer_start("e%eigenvalues()") -+ call e%eigenvalues(a, ev, error_elpa) -+ call e%timer_stop("e%eigenvalues()") -+#endif -+ -+#if defined(TEST_SOLVE_TRIDIAGONAL) -+ call e%timer_start("e%solve_tridiagonal()") -+ call e%solve_tridiagonal(d, sd, z, error_elpa) -+ call e%timer_stop("e%solve_tridiagonal()") -+ ev(:) = d(:) -+#endif -+ -+#if defined(TEST_CHOLESKY) -+ call e%timer_start("e%cholesky()") -+ call e%cholesky(a, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e%timer_stop("e%cholesky()") -+#endif -+ -+#if defined(TEST_HERMITIAN_MULTIPLY) -+ call e%timer_start("e%hermitian_multiply()") -+ call e%hermitian_multiply('F','F', int(na,kind=c_int), a, b, int(na_rows,kind=c_int), & -+ int(na_cols,kind=c_int), c, int(na_rows,kind=c_int), & -+ int(na_cols,kind=c_int), error_elpa) -+ call e%timer_stop("e%hermitian_multiply()") -+#endif -+ -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ call e%timer_start("e%generalized_eigenvectors()") -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ call e%timer_start("is_already_decomposed=.false.") -+#endif -+ call e%generalized_eigenvectors(a, b, ev, z, .false., error_elpa) -+#if defined(TEST_GENERALIZED_DECOMP_EIGENPROBLEM) -+ call e%timer_stop("is_already_decomposed=.false.") -+ a = as -+ call e%timer_start("is_already_decomposed=.true.") -+ call e%generalized_eigenvectors(a, b, ev, z, .true., error_elpa) -+ call e%timer_stop("is_already_decomposed=.true.") -+#endif -+ call e%timer_stop("e%generalized_eigenvectors()") -+#endif -+ -+ assert_elpa_ok(error_elpa) -+ -+#ifdef TEST_ALL_KERNELS -+ call e%timer_stop(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#endif -+ -+ if (myid .eq. 0) then -+#ifdef TEST_ALL_KERNELS -+ call e%print_times(elpa_int_value_to_string(KERNEL_KEY, kernel)) -+#else /* TEST_ALL_KERNELS */ -+ -+#if defined(TEST_EIGENVECTORS) -+#if TEST_QR_DECOMPOSITION == 1 -+ call e%print_times("e%eigenvectors_qr()") -+#else -+ call e%print_times("e%eigenvectors()") -+#endif -+#endif -+#ifdef TEST_EIGENVALUES -+ call e%print_times("e%eigenvalues()") -+#endif -+#ifdef TEST_SOLVE_TRIDIAGONAL -+ call e%print_times("e%solve_tridiagonal()") -+#endif -+#ifdef TEST_CHOLESKY -+ call e%print_times("e%cholesky()") -+#endif -+#ifdef TEST_HERMITIAN_MULTIPLY -+ call e%print_times("e%hermitian_multiply()") -+#endif -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ call e%print_times("e%generalized_eigenvectors()") -+#endif -+#endif /* TEST_ALL_KERNELS */ -+ endif -+ -+ if (do_test_analytic_eigenvalues) then -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, & -+ my_prow, my_pcol, check_all_evals, .false.) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_analytic_eigenvalues_eigenvectors) then -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, & -+ my_prow, my_pcol, check_all_evals, .true.) 
-+ call check_status(status, myid) -+ endif -+ -+ if(do_test_numeric_residual) then -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, & -+ np_rows,np_cols, my_prow, my_pcol) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_frank_eigenvalues) then -+ status = check_correctness_eigenvalues_frank(na, ev, z, myid) -+ call check_status(status, myid) -+ endif -+ -+ if (do_test_toeplitz_eigenvalues) then -+#if defined(TEST_EIGENVALUES) || defined(TEST_SOLVE_TRIDIAGONAL) -+ status = check_correctness_eigenvalues_toeplitz(na, diagonalElement, & -+ subdiagonalElement, ev, z, myid) -+ call check_status(status, myid) -+#endif -+ endif -+ -+ if (do_test_cholesky) then -+ status = check_correctness_cholesky(na, a, as, na_rows, sc_desc, myid ) -+ call check_status(status, myid) -+ endif -+ -+#ifdef TEST_HERMITIAN_MULTIPLY -+ if (do_test_hermitian_multiply) then -+ status = check_correctness_hermitian_multiply(na, a, b, c, na_rows, sc_desc, myid ) -+ call check_status(status, myid) -+ endif -+#endif -+ -+#ifdef TEST_GENERALIZED_EIGENPROBLEM -+ if(do_test_numeric_residual_generalized) then -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, & -+ np_cols, my_prow, & -+ my_pcol, bs) -+ call check_status(status, myid) -+ endif -+#endif -+ -+ -+#ifdef WITH_OPENMP -+ if (threads_caller .ne. omp_get_max_threads()) then -+ if (myid .eq. 0) then -+ print *, " ERROR! the number of OpenMP threads has not been restored correctly" -+ endif -+ status = 1 -+ endif -+#endif -+ if (myid == 0) then -+ print *, "" -+ endif -+ -+#ifdef TEST_ALL_KERNELS -+ a(:,:) = as(:,:) -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ d = ds -+ sd = sds -+#endif -+ end do ! kernels -+#endif -+ -+ call elpa_deallocate(e, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+#ifdef TEST_HERMITIAN_MULTIPLY -+ deallocate(b) -+ deallocate(c) -+#endif -+#if defined(TEST_MATRIX_TOEPLITZ) || defined(TEST_MATRIX_FRANK) -+ deallocate(d, ds) -+ deallocate(sd, sds) -+#endif -+#if defined(TEST_GENERALIZED_EIGENPROBLEM) -+ deallocate(b, bs) -+#endif -+ -+#ifdef TEST_ALL_LAYOUTS -+ end do ! factors -+ end do ! layouts -+#endif -+ call elpa_uninit(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ -+ contains -+ -+ subroutine check_status(status, myid) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: status, myid -+ TEST_INT_MPI_TYPE :: mpierr -+ if (status /= 0) then -+ if (myid == 0) print *, "Result incorrect!" -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test_multiple_objs.F90 elpa-2020.11.001_ok/examples/Fortran/test_multiple_objs.F90 ---- elpa-2020.11.001/examples/Fortran/test_multiple_objs.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_multiple_objs.F90 2021-02-02 12:54:41.866751000 +0100 -@@ -0,0 +1,379 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! 
- Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+ -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! 
number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_TYPE :: ierr -+ TEST_INT_MPI_TYPE :: mpierr -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e1, e2, e_ptr -+ class(elpa_autotune_t), pointer :: tune_state -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ TEST_INT_TYPE :: timings, debug, gpu -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ call prepare_matrix_analytic(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ as(:,:) = a(:,:) -+ -+ e1 => elpa_allocate(error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+ call set_basic_params(e1, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e1%set("timings",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("debug",1, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e1%set("gpu", 0, error_elpa) -+ assert_elpa_ok(error_elpa) -+ !call e1%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e1%setup()) -+ -+ call e1%store_settings("initial_parameters.txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ ! barrier after store settings, file created from one MPI rank only, but loaded everywhere -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ -+ ! try to load parameters into another object -+ e2 => elpa_allocate(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call set_basic_params(e2, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ call e2%load_settings("initial_parameters.txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ assert_elpa_ok(e2%setup()) -+ -+ ! 
test whether the user setting of e1 are correctly loade to e2 -+ call e2%get("timings", int(timings,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%get("debug", int(debug,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e2%get("gpu", int(gpu,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if ((timings .ne. 1) .or. (debug .ne. 1) .or. (gpu .ne. 0)) then -+ print *, "Parameters not stored or loaded correctly. Aborting...", timings, debug, gpu -+ stop 1 -+ endif -+ -+ if(myid == 0) print *, "parameters of e1" -+ call e1%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if(myid == 0) print *, "" -+ if(myid == 0) print *, "parameters of e2" -+ call e2%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ e_ptr => e2 -+ -+ -+ tune_state => e_ptr%autotune_setup(ELPA_AUTOTUNE_FAST, AUTOTUNE_DOMAIN, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ -+ iter=0 -+ do while (e_ptr%autotune_step(tune_state, error_elpa)) -+ assert_elpa_ok(error_elpa) -+ -+ iter=iter+1 -+ write(iter_string,'(I5.5)') iter -+ call e_ptr%print_settings(error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%store_settings("saved_parameters_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%timer_start("eigenvectors: iteration "//trim(iter_string)) -+ call e_ptr%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ call e_ptr%timer_stop("eigenvectors: iteration "//trim(iter_string)) -+ -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e_ptr%print_times("eigenvectors: iteration "//trim(iter_string)) -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) -+ a(:,:) = as(:,:) -+ call e_ptr%autotune_print_state(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%autotune_save_state(tune_state, "saved_state_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+#ifdef WITH_MPI -+ ! barrier after save state, file created from one MPI rank only, but loaded everywhere -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call e_ptr%autotune_load_state(tune_state, "saved_state_"//trim(iter_string)//".txt", error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ end do -+ -+ ! set and print the autotuned-settings -+ call e_ptr%autotune_set_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "The best combination found by the autotuning:" -+ flush(output_unit) -+ call e_ptr%autotune_print_best(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ endif -+ ! de-allocate autotune object -+ call elpa_autotune_deallocate(tune_state, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ if (myid .eq. 0) then -+ print *, "Running once more time with the best found setting..." -+ endif -+ call e_ptr%timer_start("eigenvectors: best setting") -+ call e_ptr%eigenvectors(a, ev, z, error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call e_ptr%timer_stop("eigenvectors: best setting") -+ assert_elpa_ok(error_elpa) -+ if (myid .eq. 0) then -+ print *, "" -+ call e_ptr%print_times("eigenvectors: best setting") -+ endif -+ status = check_correctness_analytic(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, & -+ .true., .true., print_times=.false.) 
-+ -+ call elpa_deallocate(e_ptr, error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ !assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol -+ -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call elpa%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test_skewsymmetric.F90 elpa-2020.11.001_ok/examples/Fortran/test_skewsymmetric.F90 ---- elpa-2020.11.001/examples/Fortran/test_skewsymmetric.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_skewsymmetric.F90 2021-02-02 12:54:41.850860000 +0100 -@@ -0,0 +1,400 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! 
-+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# define EV_TYPE_COMPLEX complex(kind=C_FLOAT_COMPLEX) -+# define MATRIX_TYPE_COMPLEX complex(kind=C_FLOAT_COMPLEX) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define MATRIX_TYPE_COMPLEX complex(kind=C_DOUBLE_COMPLEX) -+# define EV_TYPE_COMPLEX complex(kind=C_DOUBLE_COMPLEX) -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use precision_for_tests -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a_skewsymmetric(:,:), as_skewsymmetric(:,:) -+ MATRIX_TYPE_COMPLEX, allocatable :: a_complex(:,:), as_complex(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z_skewsymmetric(:,:) -+ MATRIX_TYPE_COMPLEX, allocatable :: z_complex(:,:) -+ ! eigenvalues -+ EV_TYPE, allocatable :: ev_skewsymmetric(:), ev_complex(:) -+ -+ TEST_INT_TYPE :: status, i, j -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e_complex, e_skewsymmetric -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ call setup_mpi(myid, nprocs) -+#ifdef HAVE_REDIRECT -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+! 
-+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs/np_cols -+ assert(nprocs == np_rows * np_cols) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+#ifdef WITH_MPI -+ print '((a,i0))', 'Num MPI proc: ', nprocs -+ print '(3(a,i0))','Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs -+ print '(a)', 'Process layout: ' // layout -+#endif -+ print *,'' -+ endif -+ -+ call set_up_blacsgrid(int(mpi_comm_world,kind=BLAS_KIND), np_rows, & -+ np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a_skewsymmetric (na_rows,na_cols)) -+ allocate(as_skewsymmetric(na_rows,na_cols)) -+ allocate(z_skewsymmetric (na_rows,2*na_cols)) -+ allocate(ev_skewsymmetric(na)) -+ -+ a_skewsymmetric(:,:) = 0.0 -+ z_skewsymmetric(:,:) = 0.0 -+ ev_skewsymmetric(:) = 0.0 -+ -+ call prepare_matrix_random(na, myid, sc_desc, a_skewsymmetric, & -+ z_skewsymmetric(:,1:na_cols), as_skewsymmetric, is_skewsymmetric=1) -+ -+ !call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ as_skewsymmetric(:,:) = a_skewsymmetric(:,:) -+ -+ -+ ! prepare the complex matrix for the "brute force" case -+ allocate(a_complex (na_rows,na_cols)) -+ allocate(as_complex(na_rows,na_cols)) -+ allocate(z_complex (na_rows,na_cols)) -+ allocate(ev_complex(na)) -+ -+ a_complex(1:na_rows,1:na_cols) = 0.0 -+ z_complex(1:na_rows,1:na_cols) = 0.0 -+ as_complex(1:na_rows,1:na_cols) = 0.0 -+ -+ -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef TEST_DOUBLE -+ a_complex(i,j) = dcmplx(0.0, a_skewsymmetric(i,j)) -+#endif -+#ifdef TEST_SINGLE -+ a_complex(i,j) = cmplx(0.0, a_skewsymmetric(i,j)) -+#endif -+ enddo -+ enddo -+ -+ -+ -+ z_complex(1:na_rows,1:na_cols) = a_complex(1:na_rows,1:na_cols) -+ as_complex(1:na_rows,1:na_cols) = a_complex(1:na_rows,1:na_cols) -+ -+ ! first set up and solve the brute force problem -+ e_complex => elpa_allocate(error_elpa) -+ call set_basic_params(e_complex, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e_complex%set("timings",1, error_elpa) -+ -+ call e_complex%set("debug",1,error_elpa) -+ call e_complex%set("gpu", 0,error_elpa) -+ call e_complex%set("omp_threads", 8, error_elpa) -+ -+ assert_elpa_ok(e_complex%setup()) -+ call e_complex%set("solver", elpa_solver_2stage, error_elpa) -+ -+ call e_complex%timer_start("eigenvectors: brute force as complex matrix") -+ call e_complex%eigenvectors(a_complex, ev_complex, z_complex, error_elpa) -+ call e_complex%timer_stop("eigenvectors: brute force as complex matrix") -+ -+ if (myid .eq. 0) then -+ print *, "" -+ call e_complex%print_times("eigenvectors: brute force as complex matrix") -+ endif -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+! as_complex(:,:) = z_complex(:,:) -+#ifdef TEST_SINGLE -+ status = check_correctness_evp_numeric_residuals_complex_single(na, nev, as_complex, z_complex, ev_complex, sc_desc, & -+ nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#else -+ status = check_correctness_evp_numeric_residuals_complex_double(na, nev, as_complex, z_complex, ev_complex, sc_desc, & -+ nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#endif -+ status = 0 -+ call check_status(status, myid) -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ ! 
now run the skewsymmetric case -+ e_skewsymmetric => elpa_allocate(error_elpa) -+ call set_basic_params(e_skewsymmetric, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ -+ call e_skewsymmetric%set("timings",1, error_elpa) -+ -+ call e_skewsymmetric%set("debug",1,error_elpa) -+ call e_skewsymmetric%set("gpu", 0,error_elpa) -+ call e_skewsymmetric%set("omp_threads",8, error_elpa) -+ -+ assert_elpa_ok(e_skewsymmetric%setup()) -+ -+ call e_skewsymmetric%set("solver", elpa_solver_2stage, error_elpa) -+ -+ call e_skewsymmetric%timer_start("eigenvectors: skewsymmetric ") -+ call e_skewsymmetric%skew_eigenvectors(a_skewsymmetric, ev_skewsymmetric, z_skewsymmetric, error_elpa) -+ call e_skewsymmetric%timer_stop("eigenvectors: skewsymmetric ") -+ -+ if (myid .eq. 0) then -+ print *, "" -+ call e_skewsymmetric%print_times("eigenvectors: skewsymmetric") -+ endif -+ -+ ! check eigenvalues -+ do i=1, na -+ if (myid == 0) then -+#ifdef TEST_DOUBLE -+ if (abs(ev_complex(i)-ev_skewsymmetric(i))/abs(ev_complex(i)) .gt. 1e-10) then -+#endif -+#ifdef TEST_SINGLE -+ if (abs(ev_complex(i)-ev_skewsymmetric(i))/abs(ev_complex(i)) .gt. 1e-4) then -+#endif -+ print *,"ev: i=",i,ev_complex(i),ev_skewsymmetric(i) -+ status = 1 -+ endif -+ endif -+ enddo -+ -+ -+! call check_status(status, myid) -+ -+ z_complex(:,:) = 0 -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef TEST_DOUBLE -+ z_complex(i,j) = dcmplx(z_skewsymmetric(i,j), z_skewsymmetric(i,na_cols+j)) -+#endif -+#ifdef TEST_SINGLE -+ z_complex(i,j) = cmplx(z_skewsymmetric(i,j), z_skewsymmetric(i,na_cols+j)) -+#endif -+ enddo -+ enddo -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ -+#ifdef TEST_SINGLE -+ status = check_correctness_evp_numeric_residuals_ss_real_single(na, nev, as_skewsymmetric, z_complex, ev_skewsymmetric, & -+ sc_desc, nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#else -+ status = check_correctness_evp_numeric_residuals_ss_real_double(na, nev, as_skewsymmetric, z_complex, ev_skewsymmetric, & -+ sc_desc, nblk, myid, np_rows,np_cols, my_prow, my_pcol) -+#endif -+ -+#ifdef WITH_MPI -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+#endif -+ call elpa_deallocate(e_complex,error_elpa) -+ call elpa_deallocate(e_skewsymmetric,error_elpa) -+ -+ -+ !to do -+ ! - check whether brute-force check_correctness_evp_numeric_residuals worsk (complex ev) -+ ! 
- invent a test for skewsymmetric residuals -+ -+ deallocate(a_complex) -+ deallocate(as_complex) -+ deallocate(z_complex) -+ deallocate(ev_complex) -+ -+ deallocate(a_skewsymmetric) -+ deallocate(as_skewsymmetric) -+ deallocate(z_skewsymmetric) -+ deallocate(ev_skewsymmetric) -+ call elpa_uninit(error_elpa) -+ -+ -+ -+#ifdef WITH_MPI -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+#endif -+ -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, my_prow, my_pcol) -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol -+ -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+#ifdef WITH_MPI -+ call elpa%set("mpi_comm_parent", int(MPI_COMM_WORLD,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ subroutine check_status(status, myid) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: status, myid -+ TEST_INT_MPI_TYPE :: mpierr -+ if (status /= 0) then -+ if (myid == 0) print *, "Result incorrect!" -+#ifdef WITH_MPI -+ call mpi_finalize(mpierr) -+#endif -+ call exit(status) -+ endif -+ end subroutine -+end program -diff -ruN elpa-2020.11.001/examples/Fortran/test_split_comm.F90 elpa-2020.11.001_ok/examples/Fortran/test_split_comm.F90 ---- elpa-2020.11.001/examples/Fortran/test_split_comm.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Fortran/test_split_comm.F90 2021-02-02 12:54:41.854142000 +0100 -@@ -0,0 +1,340 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! 
ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+! Define one of TEST_REAL or TEST_COMPLEX -+! Define one of TEST_SINGLE or TEST_DOUBLE -+! Define one of TEST_SOLVER_1STAGE or TEST_SOLVER_2STAGE -+! Define TEST_GPU \in [0, 1] -+! Define either TEST_ALL_KERNELS or a TEST_KERNEL \in [any valid kernel] -+ -+#if !(defined(TEST_REAL) ^ defined(TEST_COMPLEX)) -+error: define exactly one of TEST_REAL or TEST_COMPLEX -+#endif -+ -+#if !(defined(TEST_SINGLE) ^ defined(TEST_DOUBLE)) -+error: define exactly one of TEST_SINGLE or TEST_DOUBLE -+#endif -+ -+#ifdef TEST_SINGLE -+# define EV_TYPE real(kind=C_FLOAT) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_FLOAT) -+# else -+# define MATRIX_TYPE complex(kind=C_FLOAT_COMPLEX) -+# endif -+#else -+# define EV_TYPE real(kind=C_DOUBLE) -+# ifdef TEST_REAL -+# define MATRIX_TYPE real(kind=C_DOUBLE) -+# else -+# define MATRIX_TYPE complex(kind=C_DOUBLE_COMPLEX) -+# endif -+#endif -+ -+ -+#ifdef TEST_REAL -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_REAL -+#else -+# define AUTOTUNE_DOMAIN ELPA_AUTOTUNE_DOMAIN_COMPLEX -+#endif -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+#include "assert.h" -+ -+program test -+ use elpa -+ -+ !use test_util -+ use test_setup_mpi -+ use test_prepare_matrix -+ use test_read_input_parameters -+ use test_blacs_infrastructure -+ use test_check_correctness -+ use test_analytic -+ use iso_fortran_env -+ -+#ifdef HAVE_REDIRECT -+ use test_redirect -+#endif -+ implicit none -+ -+ ! matrix dimensions -+ TEST_INT_TYPE :: na, nev, nblk -+ TEST_INT_TYPE :: num_groups, group_size, color, key -+ -+ ! mpi -+ TEST_INT_TYPE :: myid, nprocs -+ TEST_INT_TYPE :: na_cols, na_rows ! local matrix size -+ TEST_INT_TYPE :: np_cols, np_rows ! number of MPI processes per column/row -+ TEST_INT_TYPE :: my_prow, my_pcol ! local MPI task position (my_prow, my_pcol) in the grid (0..np_cols -1, 0..np_rows -1) -+ TEST_INT_MPI_TYPE :: mpierr, ierr,mpi_sub_commMPI, myidMPI, nprocsMPI, colorMPI, keyMPI, & -+ myid_subMPI, nprocs_subMPI -+ TEST_INT_TYPE :: mpi_sub_comm -+ TEST_INT_TYPE :: myid_sub, nprocs_sub -+ -+ ! blacs -+ character(len=1) :: layout -+ TEST_INT_TYPE :: my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ ! The Matrix -+ MATRIX_TYPE, allocatable :: a(:,:), as(:,:) -+ ! eigenvectors -+ MATRIX_TYPE, allocatable :: z(:,:) -+ ! 
eigenvalues -+ EV_TYPE, allocatable :: ev(:) -+ -+ TEST_INT_TYPE :: status -+ integer(kind=c_int) :: error_elpa -+ -+ type(output_t) :: write_to_file -+ class(elpa_t), pointer :: e -+ -+ TEST_INT_TYPE :: iter -+ character(len=5) :: iter_string -+ -+ status = 0 -+#ifdef WITH_MPI -+ -+ call read_input_parameters(na, nev, nblk, write_to_file) -+ !call setup_mpi(myid, nprocs) -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world, myidMPI,mpierr) -+ call mpi_comm_size(mpi_comm_world, nprocsMPI,mpierr) -+ myid = int(myidMPI,kind=BLAS_KIND) -+ nprocs = int(nprocsMPI,kind=BLAS_KIND) -+ -+ if((mod(nprocs, 4) == 0) .and. (nprocs > 4)) then -+ num_groups = 4 -+ else if(mod(nprocs, 3) == 0) then -+ num_groups = 3 -+ else if(mod(nprocs, 2) == 0) then -+ num_groups = 2 -+ else -+ num_groups = 1 -+ endif -+ -+ group_size = nprocs / num_groups -+ -+ if(num_groups * group_size .ne. nprocs) then -+ print *, "Something went wrong before splitting the communicator" -+ stop 1 -+ else -+ if(myid == 0) then -+ print '((a,i0,a,i0))', "The test will split the global communicator into ", num_groups, " groups of size ", group_size -+ endif -+ endif -+ -+ ! each group of processors will have the same color -+ color = mod(myid, num_groups) -+ ! this will determine the myid in each group -+ key = myid/num_groups -+ !split the communicator -+ colorMPI=int(color,kind=MPI_KIND) -+ keyMPI = int(key, kind=MPI_KIND) -+ call mpi_comm_split(mpi_comm_world, colorMPI, keyMPI, mpi_sub_commMPI, mpierr) -+ mpi_sub_comm = int(mpi_sub_commMPI,kind=BLAS_KIND) -+ color = int(colorMPI,kind=BLAS_KIND) -+ key = int(keyMPI,kind=BLAS_KIND) -+ if(mpierr .ne. MPI_SUCCESS) then -+ print *, "communicator splitting not successfull", mpierr -+ stop 1 -+ endif -+ -+ call mpi_comm_rank(mpi_sub_commMPI, myid_subMPI, mpierr) -+ call mpi_comm_size(mpi_sub_commMPI, nprocs_subMPI, mpierr) -+ myid_sub = int(myid_subMPI,kind=BLAS_KIND) -+ nprocs_sub = int(nprocs_subMPI,kind=BLAS_KIND) -+ -+ !print *, "glob ", myid, nprocs, ", loc ", myid_sub, nprocs_sub, ", color ", color, ", key ", key -+ -+ if((mpierr .ne. MPI_SUCCESS) .or. (nprocs_sub .ne. group_size) .or. (myid_sub >= group_size)) then -+ print *, "something wrong with the sub communicators" -+ stop 1 -+ endif -+ -+ -+#ifdef HAVE_REDIRECT -+ call MPI_BARRIER(MPI_COMM_WORLD, mpierr) -+ call redirect_stdout(myid) -+#endif -+ -+ if (elpa_init(CURRENT_API_VERSION) /= ELPA_OK) then -+ print *, "ELPA API version not supported" -+ stop 1 -+ endif -+ -+ layout = 'C' -+ do np_cols = NINT(SQRT(REAL(nprocs_sub))),2,-1 -+ if(mod(nprocs_sub,np_cols) == 0 ) exit -+ enddo -+ np_rows = nprocs_sub/np_cols -+ assert(nprocs_sub == np_rows * np_cols) -+ assert(nprocs == np_rows * np_cols * num_groups) -+ -+ if (myid == 0) then -+ print '((a,i0))', 'Matrix size: ', na -+ print '((a,i0))', 'Num eigenvectors: ', nev -+ print '((a,i0))', 'Blocksize: ', nblk -+ print '(a)', 'Process layout: ' // layout -+ print *,'' -+ endif -+ if (myid_sub == 0) then -+ print '(4(a,i0))','GROUP ', color, ': Number of processor rows=',np_rows,', cols=',np_cols,', total=',nprocs_sub -+ endif -+ -+ ! 
USING the subcommunicator -+ call set_up_blacsgrid(int(mpi_sub_comm,kind=BLAS_KIND), np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, np_rows, np_cols, & -+ na_rows, na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(as(na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ allocate(ev(na)) -+ -+ a(:,:) = 0.0 -+ z(:,:) = 0.0 -+ ev(:) = 0.0 -+ -+ !call prepare_matrix_analytic(na, a, nblk, myid_sub, np_rows, np_cols, my_prow, my_pcol, print_times=.false.) -+ call prepare_matrix_random(na, myid_sub, sc_desc, a, z, as) -+ as(:,:) = a(:,:) -+ -+ e => elpa_allocate(error_elpa) -+ call set_basic_params(e, na, nev, na_rows, na_cols, mpi_sub_comm, my_prow, my_pcol) -+ -+ call e%set("timings",1, error_elpa) -+ -+ call e%set("debug",1, error_elpa) -+ call e%set("gpu", 0, error_elpa) -+ !call e%set("max_stored_rows", 15, error_elpa) -+ -+ assert_elpa_ok(e%setup()) -+ -+ -+ -+! if(myid == 0) print *, "parameters of e" -+! call e%print_all_parameters() -+! if(myid == 0) print *, "" -+ -+ -+ call e%timer_start("eigenvectors") -+ call e%eigenvectors(a, ev, z, error_elpa) -+ call e%timer_stop("eigenvectors") -+ -+ assert_elpa_ok(error_elpa) -+ -+ !status = check_correctness_analytic(na, nev, ev, z, nblk, myid_sub, np_rows, np_cols, my_prow, my_pcol, & -+ ! .true., .true., print_times=.false.) -+ status = check_correctness_evp_numeric_residuals(na, nev, as, z, ev, sc_desc, nblk, myid_sub, & -+ np_rows,np_cols, my_prow, my_pcol) -+ if (status /= 0) & -+ print *, "processor ", myid, ": Result incorrect for processor group ", color -+ -+ if (myid .eq. 0) then -+ print *, "Showing times of one goup only" -+ call e%print_times("eigenvectors") -+ endif -+ -+ call elpa_deallocate(e, error_elpa) -+ -+ deallocate(a) -+ deallocate(as) -+ deallocate(z) -+ deallocate(ev) -+ -+ call elpa_uninit(error_elpa) -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+#endif -+ call exit(status) -+ -+contains -+ subroutine set_basic_params(elpa, na, nev, na_rows, na_cols, communicator, my_prow, my_pcol) -+ use iso_c_binding -+ implicit none -+ class(elpa_t), pointer :: elpa -+ TEST_INT_TYPE, intent(in) :: na, nev, na_rows, na_cols, my_prow, my_pcol, communicator -+ -+#ifdef WITH_MPI -+ call elpa%set("na", int(na,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nev", int(nev,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_nrows", int(na_rows,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("local_ncols", int(na_cols,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("nblk", int(nblk,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ -+ call elpa%set("mpi_comm_parent", int(communicator,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_row", int(my_prow,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+ call elpa%set("process_col", int(my_pcol,kind=c_int), error_elpa) -+ assert_elpa_ok(error_elpa) -+#endif -+ end subroutine -+ -+end program -diff -ruN elpa-2020.11.001/examples/Makefile_hybrid elpa-2020.11.001_ok/examples/Makefile_hybrid ---- elpa-2020.11.001/examples/Makefile_hybrid 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Makefile_hybrid 2021-07-02 10:19:18.864600000 +0200 -@@ -0,0 +1,24 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB 
= -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -qopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+# GCC -+# F90 = mpif90 -O3 -fopenmp -I$(ELPA_MODULES_OPENMP) -I$(ELPA_INCLUDE_OPENMP) -I$(ELPA_INCLUDE_OPENMP)/elpa -+LIBS = -L$(ELPA_LIB_OPENMP) -lelpa_openmp -lelpatest_openmp -lelpa $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -qopenmp -+# GCC -+# CC = mpicc -O3 -fopenmp -+ -+all: test_real_e1_omp test_real_e2_omp -+ -+test_real_e1_omp: test_real_e1.F90 -+ $(F90) -DWITH_OPENMP -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2_omp: test_real_e2.F90 -+ $(F90) -DWITH_OPENMP -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Makefile_pure elpa-2020.11.001_ok/examples/Makefile_pure ---- elpa-2020.11.001/examples/Makefile_pure 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Makefile_pure 2021-07-02 10:19:26.723701000 +0200 -@@ -0,0 +1,20 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -+CC = mpicc -O3 -+ -+all: test_real_e1 test_real_e2 -+ -+test_real_e1: test_real_e1.F90 -+ $(F90) -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2: test_real_e2.F90 -+ $(F90) -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/Makefile_pure_cuda elpa-2020.11.001_ok/examples/Makefile_pure_cuda ---- elpa-2020.11.001/examples/Makefile_pure_cuda 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/Makefile_pure_cuda 2021-07-02 10:19:34.549476000 +0200 -@@ -0,0 +1,20 @@ -+# MPICH, that is IntelMPI or ParaStationMPI -+SCALAPACK_LIB = -lmkl_scalapack_lp64 -lmkl_blacs_intelmpi_lp64 -+# OpenMPI -+# SCALAPACK_LIB = -lmkl_scalapack_lp64 $(MKLROOT)/lib/intel64/libmkl_blacs_openmpi_lp64.a -+LAPACK_LIB = -+# Intel compiler -+MKL = -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -liomp5 -lpthread -lstdc++ -+# GCC -+# MKL = -lmkl_gf_lp64 -lmkl_sequential -lmkl_core -lgomp -lpthread -lstdc++ -lm -+F90 = mpif90 -O3 -I$(ELPA_MODULES) -I$(ELPA_INCLUDE) -I$(ELPA_INCLUDE)/elpa -+LIBS = -L$(ELPA_LIB) -lelpa -lelpatest $(SCALAPACK_LIB) $(MKL) -lcudart -+CC = mpicc -O3 -+ -+all: test_real_e1 test_real_e2 -+ -+test_real_e1: test_real_e1.F90 -+ $(F90) -DCUDA -o $@ test_real_e1.F90 $(LIBS) -+ -+test_real_e2: test_real_e2.F90 -+ $(F90) -DCUDA -DCUDAKERNEL -o $@ test_real_e2.F90 $(LIBS) -diff -ruN elpa-2020.11.001/examples/shared/mod_tests_blas_interfaces.F90 elpa-2020.11.001_ok/examples/shared/mod_tests_blas_interfaces.F90 ---- elpa-2020.11.001/examples/shared/mod_tests_blas_interfaces.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/mod_tests_blas_interfaces.F90 2021-02-02 12:54:50.045819000 +0100 -@@ -0,0 +1,53 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! 
consisting of the following organizations: -+! -+! - Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! https://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! This file was written by A. Marek, MPCDF -+ -+#include "config-f90.h" -+#define PRECISION_MODULE precision_for_tests -+module tests_blas_interfaces -+ use iso_c_binding -+ use precision_for_tests -+ -+ implicit none -+ -+#include "../../src/helpers/fortran_blas_interfaces.F90" -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/mod_tests_scalapack_interfaces.F90 elpa-2020.11.001_ok/examples/shared/mod_tests_scalapack_interfaces.F90 ---- elpa-2020.11.001/examples/shared/mod_tests_scalapack_interfaces.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/mod_tests_scalapack_interfaces.F90 2021-02-02 12:54:50.050096000 +0100 -@@ -0,0 +1,56 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! https://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! 
You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! This file was written by A. Marek, MPCDF -+ -+ -+#include "config-f90.h" -+#define PRECISION_MODULE precision_for_tests -+module tests_scalapack_interfaces -+ use iso_c_binding -+ use precision_for_tests -+ -+ implicit none -+ -+#include "../../src/helpers/fortran_scalapack_interfaces.F90" -+ -+end module -+ -+ -diff -ruN elpa-2020.11.001/examples/shared/test_analytic.F90 elpa-2020.11.001_ok/examples/shared/test_analytic.F90 ---- elpa-2020.11.001/examples/shared/test_analytic.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_analytic.F90 2021-02-02 12:54:50.050664000 +0100 -@@ -0,0 +1,190 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. 
-+ -+#include "../Fortran/assert.h" -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+module test_analytic -+ -+ use test_util -+#ifdef HAVE_DETAILED_TIMINGS -+ use ftimings -+#else -+ use timings_dummy -+#endif -+ use precision_for_tests -+ -+ interface prepare_matrix_analytic -+ module procedure prepare_matrix_analytic_complex_double -+ module procedure prepare_matrix_analytic_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_analytic_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_analytic_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_analytic -+ module procedure check_correctness_analytic_complex_double -+ module procedure check_correctness_analytic_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_analytic_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_analytic_complex_single -+#endif -+ end interface -+ -+ -+ interface print_matrix -+ module procedure print_matrix_complex_double -+ module procedure print_matrix_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure print_matrix_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure print_matrix_complex_single -+#endif -+ end interface -+ -+ TEST_INT_TYPE, parameter, private :: num_primes = 3 -+ TEST_INT_TYPE, parameter, private :: primes(num_primes) = (/2,3,5/) -+ -+ TEST_INT_TYPE, parameter, private :: ANALYTIC_MATRIX = 0 -+ TEST_INT_TYPE, parameter, private :: ANALYTIC_EIGENVECTORS = 1 -+ TEST_INT_TYPE, parameter, private :: ANALYTIC_EIGENVALUES = 2 -+ -+ contains -+ -+ function decompose(num, decomposition) result(possible) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: num -+ TEST_INT_TYPE, intent(out) :: decomposition(num_primes) -+ logical :: possible -+ TEST_INT_TYPE :: reminder, prime, prime_id -+ -+ decomposition = 0 -+ possible = .true. -+ reminder = num -+ do prime_id = 1, num_primes -+ prime = primes(prime_id) -+ do while (MOD(reminder, prime) == 0) -+ decomposition(prime_id) = decomposition(prime_id) + 1 -+ reminder = reminder / prime -+ end do -+ end do -+ if(reminder > 1) then -+ possible = .false. 
-+ end if -+ end function -+ -+ function compose(decomposition) result(num) -+ implicit none -+ TEST_INT_TYPE, intent(in) :: decomposition(num_primes) -+ TEST_INT_TYPE :: num, prime_id -+ -+ num = 1; -+ do prime_id = 1, num_primes -+ num = num * primes(prime_id) ** decomposition(prime_id) -+ end do -+ end function -+ -+ -+#include "../../src/general/prow_pcol.F90" -+#include "../../src/general/map_global_to_local.F90" -+ -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+ -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef DOUBLE_PRECISION -+#undef REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_analytic_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_analytic_template.F90 elpa-2020.11.001_ok/examples/shared/test_analytic_template.F90 ---- elpa-2020.11.001/examples/shared/test_analytic_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_analytic_template.F90 2021-02-02 12:54:50.046589000 +0100 -@@ -0,0 +1,667 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! 
the original distribution, the GNU Lesser General Public License. -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+ -+ subroutine prepare_matrix_analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, a, nblk, myid, np_rows, np_cols, my_prow, my_pcol, print_times) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, nblk, myid, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=REAL_DATATYPE), intent(inout):: a(:,:) -+ logical, optional :: print_times -+ logical :: print_timer -+ TEST_INT_TYPE :: globI, globJ, locI, locJ, pi, pj, levels(num_primes) -+ integer(kind=c_int) :: loc_I, loc_J, p_i, p_j -+#ifdef HAVE_DETAILED_TIMINGS -+ type(timer_t) :: timer -+#else -+ type(timer_dummy_t) :: timer -+#endif -+ -+ call timer%enable() -+ call timer%start("prepare_matrix_analytic") -+ -+ print_timer = .true. -+ -+ if (present(print_times)) then -+ print_timer = print_times -+ endif -+ -+ ! for debug only, do it systematicaly somehow ... unit tests -+ call check_module_sanity_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid) -+ -+ if(.not. decompose(na, levels)) then -+ if(myid == 0) then -+ print *, "Analytic test can be run only with matrix sizes of the form 2^n * 3^m * 5^o" -+ stop 1 -+ end if -+ end if -+ -+ call timer%start("loop") -+ do globI = 1, na -+ -+ p_i = prow(int(globI,kind=c_int), int(nblk,kind=c_int), int(np_rows,kind=c_int)) -+ pi = int(p_i,kind=INT_TYPE) -+ if (my_prow .ne. pi) cycle -+ -+ do globJ = 1, na -+ -+ p_j = pcol(int(globJ,kind=c_int), int(nblk,kind=c_int), int(np_cols,kind=c_int)) -+ pj = int(p_j,kind=INT_TYPE) -+ if (my_pcol .ne. pj) cycle -+ -+ if(map_global_array_index_to_local_index(int(globI,kind=c_int), int(globJ,kind=c_int), loc_I, loc_J, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ locI = int(loc_i,kind=INT_TYPE) -+ locJ = int(loc_j,kind=INT_TYPE) -+ call timer%start("evaluation") -+ a(locI, locJ) = analytic_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, globI, globJ) -+ call timer%stop("evaluation") -+ else -+ print *, "Warning ... error in preparation loop of the analytic test" -+ end if -+ end do -+ end do -+ call timer%stop("loop") -+ -+ call timer%stop("prepare_matrix_analytic") -+ if(myid == 0 .and. 
print_timer) then -+ call timer%print("prepare_matrix_analytic") -+ end if -+ call timer%free() -+ end subroutine -+ -+ function check_correctness_analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, nev, ev, z, nblk, myid, np_rows, np_cols, my_prow, my_pcol, check_all_evals, & -+ check_eigenvectors, print_times) result(status) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: na, nev, nblk, myid, np_rows, & -+ np_cols, my_prow, my_pcol -+ TEST_INT_TYPE :: status -+ TEST_INT_MPI_TYPE :: mpierr -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:) -+ real(kind=rk), intent(inout) :: ev(:) -+ logical, intent(in) :: check_all_evals, check_eigenvectors -+ -+ TEST_INT_TYPE :: globI, globJ, locI, locJ, & -+ levels(num_primes) -+ integer(kind=c_int) :: loc_I, loc_J -+ real(kind=rk) :: diff, max_z_diff, max_ev_diff, & -+ glob_max_z_diff, max_curr_z_diff -+#ifdef DOUBLE_PRECISION -+ real(kind=rk), parameter :: tol_eigenvalues = 5e-14_rk8 -+ real(kind=rk), parameter :: tol_eigenvectors = 6e-11_rk8 -+#endif -+#ifdef SINGLE_PRECISION -+ ! tolerance needs to be very high due to qr tests -+ ! it should be distinguished somehow! -+ real(kind=rk), parameter :: tol_eigenvalues = 7e-6_rk4 -+ real(kind=rk), parameter :: tol_eigenvectors = 4e-3_rk4 -+#endif -+ real(kind=rk) :: computed_ev, expected_ev -+ MATH_DATATYPE(kind=rck) :: computed_z, expected_z -+ -+ MATH_DATATYPE(kind=rck) :: max_value_for_normalization, & -+ computed_z_on_max_position, & -+ normalization_quotient -+ MATH_DATATYPE(kind=rck) :: max_values_array(np_rows * np_cols), & -+ corresponding_exact_value -+ integer(kind=c_int) :: max_value_idx, rank_with_max, & -+ rank_with_max_reduced, & -+ num_checked_evals -+ integer(kind=c_int) :: max_idx_array(np_rows * np_cols), & -+ rank -+ logical, optional :: print_times -+ logical :: print_timer -+ -+#ifdef HAVE_DETAILED_TIMINGS -+ type(timer_t) :: timer -+#else -+ type(timer_dummy_t) :: timer -+#endif -+ -+ call timer%enable() -+ call timer%start("check_correctness_analytic") -+ -+ -+ print_timer = .true. -+ if (present(print_times)) then -+ print_timer = print_times -+ endif -+ -+ if(.not. decompose(na, levels)) then -+ print *, "can not decomopse matrix size" -+ stop 1 -+ end if -+ -+ if(check_all_evals) then -+ num_checked_evals = na -+ else -+ num_checked_evals = nev -+ endif -+ !call print_matrix(myid, na, z, "z") -+ max_z_diff = 0.0_rk -+ max_ev_diff = 0.0_rk -+ call timer%start("loop_eigenvalues") -+ do globJ = 1, num_checked_evals -+ computed_ev = ev(globJ) -+ call timer%start("evaluation") -+ expected_ev = analytic_eigenvalues_real_& -+ &PRECISION& -+ &(na, globJ) -+ call timer%stop("evaluation") -+ diff = abs(computed_ev - expected_ev) -+ max_ev_diff = max(diff, max_ev_diff) -+ end do -+ call timer%stop("loop_eigenvalues") -+ -+ call timer%start("loop_eigenvectors") -+ do globJ = 1, nev -+ max_curr_z_diff = 0.0_rk -+ -+ ! eigenvectors are unique up to multiplication by scalar (complex in complex case) -+ ! to be able to compare them with analytic, we have to normalize them somehow -+ ! we will find a value in computed eigenvector with highest absolut value and enforce -+ ! such multiple of computed eigenvector, that the value on corresponding position is the same -+ ! as an corresponding value in the analytical eigenvector -+ -+ ! 
find the maximal value in the local part of given eigenvector (with index globJ) -+ max_value_for_normalization = 0.0_rk -+ max_value_idx = -1 -+ do globI = 1, na -+ if(map_global_array_index_to_local_index(int(globI,kind=c_int), int(globJ,kind=c_int), loc_I, loc_J, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ locI = int(loc_I,kind=INT_TYPE) -+ locJ = int(loc_J,kind=INT_TYPE) -+ computed_z = z(locI, locJ) -+ if(abs(computed_z) > abs(max_value_for_normalization)) then -+ max_value_for_normalization = computed_z -+ max_value_idx = int(globI,kind=c_int) -+ end if -+ end if -+ end do -+ -+ ! find the global maximum and its position. From technical reasons (looking for a -+ ! maximum of complex number), it is not so easy to do it nicely. Therefore we -+ ! communicate local maxima to mpi rank 0 and resolve there. If we wanted to do -+ ! it without this, it would be tricky.. question of uniquness - two complex numbers -+ ! with the same absolut values, but completely different... -+#ifdef WITH_MPI -+ call MPI_Gather(max_value_for_normalization, 1_MPI_KIND, MPI_MATH_DATATYPE_PRECISION, & -+ max_values_array, 1_MPI_KIND, MPI_MATH_DATATYPE_PRECISION, 0_MPI_KIND, & -+ int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+ call MPI_Gather(max_value_idx, 1_MPI_KIND, MPI_INT, max_idx_array, 1_MPI_KIND, MPI_INT, & -+ 0_MPI_KIND, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+ max_value_for_normalization = 0.0_rk -+ max_value_idx = -1 -+ do rank = 1, np_cols * np_rows -+ if(abs(max_values_array(rank)) > abs(max_value_for_normalization)) then -+ max_value_for_normalization = max_values_array(rank) -+ max_value_idx = max_idx_array(rank) -+ end if -+ end do -+ call MPI_Bcast(max_value_for_normalization, 1_MPI_KIND, MPI_MATH_DATATYPE_PRECISION, & -+ 0_MPI_KIND, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+ call MPI_Bcast(max_value_idx, 1_MPI_KIND, MPI_INT, 0_MPI_KIND, & -+ int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#endif -+ ! we decided what the maximum computed value is. Calculate expected value on the same -+ if(abs(max_value_for_normalization) < 0.0001_rk) then -+ if(myid == 0) print *, 'Maximal value in eigenvector too small :', max_value_for_normalization -+ status =1 -+ return -+ end if -+ call timer%start("evaluation_helper") -+ corresponding_exact_value = analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, int(max_value_idx,kind=INT_TYPE), globJ) -+ call timer%stop("evaluation_helper") -+ normalization_quotient = corresponding_exact_value / max_value_for_normalization -+ ! write(*,*) "normalization q", normalization_quotient -+ -+ ! compare computed and expected eigenvector values, but take into account normalization quotient -+ do globI = 1, na -+ if(map_global_array_index_to_local_index(int(globI,kind=c_int), int(globJ,kind=c_int), loc_I, loc_J, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ locI = int(loc_I,kind=INT_TYPE) -+ locJ = int(loc_J,kind=INT_TYPE) -+ computed_z = z(locI, locJ) -+ call timer%start("evaluation") -+ expected_z = analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, globI, globJ) -+ call timer%stop("evaluation") -+ max_curr_z_diff = max(abs(normalization_quotient * computed_z - expected_z), max_curr_z_diff) -+ end if -+ end do -+ ! 
we have max difference of one of the eigenvectors, update global -+ max_z_diff = max(max_z_diff, max_curr_z_diff) -+ end do !globJ -+ call timer%stop("loop_eigenvectors") -+ -+#ifdef WITH_MPI -+ call mpi_allreduce(max_z_diff, glob_max_z_diff, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, & -+ int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else -+ glob_max_z_diff = max_z_diff -+#endif -+ if(myid == 0) print *, 'Maximum error in eigenvalues :', max_ev_diff -+ if (check_eigenvectors) then -+ if(myid == 0) print *, 'Maximum error in eigenvectors :', glob_max_z_diff -+ endif -+ -+ status = 0 -+ if (nev .gt. 2) then -+ if (max_ev_diff .gt. tol_eigenvalues .or. max_ev_diff .eq. 0.0_rk) status = 1 -+ if (check_eigenvectors) then -+ if (glob_max_z_diff .gt. tol_eigenvectors .or. glob_max_z_diff .eq. 0.0_rk) status = 1 -+ endif -+ else -+ if (max_ev_diff .gt. tol_eigenvalues) status = 1 -+ if (check_eigenvectors) then -+ if (glob_max_z_diff .gt. tol_eigenvectors) status = 1 -+ endif -+ endif -+ -+ call timer%stop("check_correctness_analytic") -+ if(myid == 0 .and. print_timer) then -+ call timer%print("check_correctness_analytic") -+ end if -+ call timer%free() -+ end function -+ -+ -+ function analytic_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) result(element) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, i, j -+ MATH_DATATYPE(kind=REAL_DATATYPE) :: element -+ -+ element = analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, ANALYTIC_MATRIX) -+ -+ end function -+ -+ function analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) result(element) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, i, j -+ MATH_DATATYPE(kind=REAL_DATATYPE) :: element -+ -+ element = analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, ANALYTIC_EIGENVECTORS) -+ -+ end function -+ -+ function analytic_eigenvalues_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i) result(element) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: na, i -+ real(kind=REAL_DATATYPE) :: element -+ -+ element = analytic_real_& -+ &PRECISION& -+ &(na, i, i, ANALYTIC_EIGENVALUES) -+ -+ end function -+ -+ function analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, what) result(element) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: na, i, j, what -+ MATH_DATATYPE(kind=rck) :: element, mat2x2(2,2), mat(5,5) -+ real(kind=rk) :: a, am, amp -+ TEST_INT_TYPE :: levels(num_primes) -+ TEST_INT_TYPE :: ii, jj, m, prime_id, prime, total_level, level -+ -+ real(kind=rk), parameter :: s = 0.5_rk -+ real(kind=rk), parameter :: c = 0.86602540378443864679_rk -+ real(kind=rk), parameter :: sq2 = 1.4142135623730950488_rk -+ -+ real(kind=rk), parameter :: largest_ev = 2.0_rk -+ -+ assert(i <= na) -+ assert(j <= na) -+ assert(i >= 0) -+ assert(j >= 0) -+ assert(decompose(na, levels)) -+ ! go to zero-based indexing -+ ii = i - 1 -+ jj = j - 1 -+ if (na .gt. 
2) then -+ a = exp(log(largest_ev)/(na-1)) -+ else -+ a = exp(log(largest_ev)/(1)) -+ endif -+ -+ element = 1.0_rck -+#ifdef COMPLEXCASE -+ element = (1.0_rk, 0.0_rk) -+#endif -+ total_level = 0 -+ am = a -+ do prime_id = 1,num_primes -+ prime = primes(prime_id) -+ do level = 1, levels(prime_id) -+ amp = am**(prime-1) -+ total_level = total_level + 1 -+ if(what == ANALYTIC_MATRIX) then -+#ifdef REALCASE -+ mat2x2 = reshape((/ c*c + amp * s*s, (amp - 1.0_rk) * s*c, & -+ (amp - 1.0_rk) * s*c, s*s + amp * c*c /), & -+ (/2, 2/), order=(/2,1/)) -+#endif -+#ifdef COMPLEXCASE -+ mat2x2 = reshape((/ 0.5_rck * (amp + 1.0_rck) * (1.0_rk, 0.0_rk), sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, 1.0_rk), & -+ sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, -1.0_rk), 0.5_rck * (amp + 1.0_rck) * (1.0_rk, 0.0_rk) /), & -+ (/2, 2/), order=(/2,1/)) -+! intel 2018 does not reshape correctly (one would have to specify order=(/1,2/) -+! until this is resolved, I resorted to the following -+ mat2x2(1,2) = sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, 1.0_rk) -+ mat2x2(2,1) = sq2/4.0_rk * (amp - 1.0_rk) * (1.0_rk, -1.0_rk) -+#endif -+ else if(what == ANALYTIC_EIGENVECTORS) then -+#ifdef REALCASE -+ mat2x2 = reshape((/ c, s, & -+ -s, c /), & -+ (/2, 2/), order=(/2,1/)) -+! intel 2018 does not reshape correctly (one would have to specify order=(/1,2/) -+! until this is resolved, I resorted to the following -+ mat2x2(1,2) = s -+ mat2x2(2,1) = -s -+#endif -+#ifdef COMPLEXCASE -+ mat2x2 = reshape((/ -sq2/2.0_rck * (1.0_rk, 0.0_rk), -sq2/2.0_rck * (1.0_rk, 0.0_rk), & -+ 0.5_rk * (1.0_rk, -1.0_rk), 0.5_rk * (-1.0_rk, 1.0_rk) /), & -+ (/2, 2/), order=(/2,1/)) -+! intel 2018 does not reshape correctly (one would have to specify order=(/1,2/) -+! until this is resolved, I resorted to the following -+ mat2x2(1,2) = -sq2/2.0_rck * (1.0_rk, 0.0_rk) -+ mat2x2(2,1) = 0.5_rk * (1.0_rk, -1.0_rk) -+#endif -+ else if(what == ANALYTIC_EIGENVALUES) then -+ mat2x2 = reshape((/ 1.0_rck, 0.0_rck, & -+ 0.0_rck, amp /), & -+ (/2, 2/), order=(/2,1/)) -+ else -+ assert(.false.) -+ end if -+ -+ mat = 0.0_rck -+ if(prime == 2) then -+ mat(1:2, 1:2) = mat2x2 -+ else if(prime == 3) then -+ mat((/1,3/),(/1,3/)) = mat2x2 -+ if(what == ANALYTIC_EIGENVECTORS) then -+ mat(2,2) = 1.0_rck -+ else -+ mat(2,2) = am -+ end if -+ else if(prime == 5) then -+ mat((/1,5/),(/1,5/)) = mat2x2 -+ if(what == ANALYTIC_EIGENVECTORS) then -+ mat(2,2) = 1.0_rck -+ mat(3,3) = 1.0_rck -+ mat(4,4) = 1.0_rck -+ else -+ mat(2,2) = am -+ mat(3,3) = am**2 -+ mat(4,4) = am**3 -+ end if -+ else -+ assert(.false.) -+ end if -+ -+ ! write(*,*) "calc value, elem: ", element, ", mat: ", mod(ii,2), mod(jj,2), mat(mod(ii,2), mod(jj,2)), "am ", am -+ ! write(*,*) " matrix mat", mat -+ element = element * mat(mod(ii,prime) + 1, mod(jj,prime) + 1) -+ ii = ii / prime -+ jj = jj / prime -+ -+ am = am**prime -+ end do -+ end do -+ !write(*,*) "returning value ", element -+ end function -+ -+ -+ subroutine print_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid, na, mat, mat_name) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na -+ character(len=*), intent(in) :: mat_name -+ MATH_DATATYPE(kind=rck) :: mat(na, na) -+ TEST_INT_TYPE :: i,j -+ character(len=20) :: na_str -+ -+ if(myid .ne. 
0) & -+ return -+ write(*,*) "Matrix: "//trim(mat_name) -+ write(na_str, *) na -+ do i = 1, na -+#ifdef REALCASE -+ write(*, '('//trim(na_str)//'f8.3)') mat(i, :) -+#endif -+#ifdef COMPLEXCASE -+ write(*,'('//trim(na_str)//'(A,f8.3,A,f8.3,A))') ('(', real(mat(i,j)), ',', aimag(mat(i,j)), ')', j=1,na) -+#endif -+ end do -+ write(*,*) -+ end subroutine -+ -+ -+ subroutine check_matrices_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid, na) -+ use precision_for_tests -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na -+ MATH_DATATYPE(kind=rck) :: A(na, na), S(na, na), L(na, na), res(na, na) -+ TEST_INT_TYPE :: i, j, decomposition(num_primes) -+ -+ real(kind=rk) :: err -+#ifdef DOUBLE_PRECISION -+ real(kind=rk), parameter :: TOL = 1e-8 -+#endif -+#ifdef SINGLE_PRECISION -+ real(kind=rk), parameter :: TOL = 1e-4 -+#endif -+ -+ assert(decompose(na, decomposition)) -+ -+ do i = 1, na -+ do j = 1, na -+ A(i,j) = analytic_matrix_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) -+ S(i,j) = analytic_eigenvectors_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j) -+ L(i,j) = analytic_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(na, i, j, ANALYTIC_EIGENVALUES) -+ end do -+ end do -+ -+ res = matmul(A,S) - matmul(S,L) -+ err = maxval(abs(res)) -+ -+ if(err > TOL) then -+ print *, "WARNING: sanity test in module analytic failed, error is ", err -+ end if -+ -+ if(.false.) then -+ !if(na == 2 .or. na == 5) then -+ call print_matrix(myid, na, A, "A") -+ call print_matrix(myid, na, S, "S") -+ call print_matrix(myid, na, L, "L") -+ -+ call print_matrix(myid, na, matmul(A,S), "AS") -+ call print_matrix(myid, na, matmul(S,L), "SL") -+ -+ call print_matrix(myid, na, res , "res") -+ end if -+ -+ end subroutine -+ -+ subroutine check_module_sanity_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid) -+ use precision_for_tests -+ -+ implicit none -+ TEST_INT_TYPE, intent(in) :: myid -+ TEST_INT_TYPE :: decomposition(num_primes), i -+ TEST_INT_TYPE, parameter :: check_sizes(7) = (/2, 3, 5, 6, 10, 25, 150/) -+ if(myid == 0) print *, "Checking test_analytic module sanity.... " -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+ assert(decompose(1500_lik, decomposition)) -+#else -+ assert(decompose(1500_ik, decomposition)) -+#endif -+ assert(all(decomposition == (/2,1,3/))) -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+ assert(decompose(6_lik,decomposition)) -+#else -+ assert(decompose(6_ik,decomposition)) -+#endif -+ assert(all(decomposition == (/1,1,0/))) -+ -+ do i =1, size(check_sizes) -+ call check_matrices_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &(myid, check_sizes(i)) -+ end do -+ -+ if(myid == 0) print *, "Checking test_analytic module sanity.... DONE" -+ -+ end subroutine -diff -ruN elpa-2020.11.001/examples/shared/test_blacs_infrastructure.F90 elpa-2020.11.001_ok/examples/shared/test_blacs_infrastructure.F90 ---- elpa-2020.11.001/examples/shared/test_blacs_infrastructure.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_blacs_infrastructure.F90 2021-02-02 12:54:50.044386000 +0100 -@@ -0,0 +1,208 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! 
- Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#define TEST_C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#define TEST_C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#endif -+ -+module test_blacs_infrastructure -+ -+ contains -+ -+ !c> void set_up_blacsgrid_f(TEST_C_INT_TYPE mpi_comm_parent, TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, char layout, -+ !c> TEST_C_INT_TYPE_PTR my_blacs_ctxt, TEST_C_INT_TYPE_PTR my_prow, -+ !c> TEST_C_INT_TYPE_PTR my_pcol); -+ subroutine set_up_blacsgrid(mpi_comm_parent, np_rows, np_cols, layout, & -+ my_blacs_ctxt, my_prow, my_pcol) bind(C, name="set_up_blacsgrid_f") -+ -+ use precision_for_tests -+ use test_util -+ use iso_c_binding -+ -+ implicit none -+ TEST_INT_TYPE, intent(in), value :: mpi_comm_parent, np_rows, np_cols -+#ifdef SXAURORA -+ character(len=1), intent(in) :: layout -+#else -+ character(kind=c_char), intent(in), value :: layout -+#endif -+ TEST_INT_TYPE, intent(out) :: my_blacs_ctxt, my_prow, my_pcol -+ -+#ifdef WITH_MPI -+ TEST_INT_TYPE :: np_rows_, np_cols_ -+#endif -+ -+ if (layout /= 'R' .and. 
layout /= 'C') then -+ print *, "layout must be 'R' or 'C'" -+ stop 1 -+ end if -+ -+ my_blacs_ctxt = mpi_comm_parent -+#ifdef WITH_MPI -+ call BLACS_Gridinit(my_blacs_ctxt, layout, np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, np_rows_, np_cols_, my_prow, my_pcol) -+ if (np_rows /= np_rows_) then -+ print *, "BLACS_Gridinfo returned different values for np_rows as set by BLACS_Gridinit" -+ stop 1 -+ endif -+ if (np_cols /= np_cols_) then -+ print *, "BLACS_Gridinfo returned different values for np_cols as set by BLACS_Gridinit" -+ stop 1 -+ endif -+#else -+ my_prow = 0 -+ my_pcol = 0 -+#endif -+ end subroutine -+ -+ subroutine set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, na_rows, & -+ na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ use elpa_utilities, only : error_unit -+ use test_util -+ use precision_for_tests -+ use tests_scalapack_interfaces -+ implicit none -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, my_prow, my_pcol, np_rows, & -+ np_cols, & -+ my_blacs_ctxt -+ TEST_INT_TYPE, intent(inout) :: info -+ TEST_INT_TYPE, intent(out) :: na_rows, na_cols, sc_desc(1:9) -+ -+#ifdef WITH_MPI -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ sc_desc(:) = 0 -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+ na_rows = numroc(na, nblk, my_prow, 0_BLAS_KIND, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0_BLAS_KIND, np_cols) -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0_BLAS_KIND, 0_BLAS_KIND, & -+ my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_unit,*) 'Error in BLACS descinit! info=',info -+ write(error_unit,*) 'Most likely this happend since you want to use' -+ write(error_unit,*) 'more MPI tasks than are possible for your' -+ write(error_unit,*) 'problem size (matrix size and blocksize)!' -+ write(error_unit,*) 'The blacsgrid can not be set up properly' -+ write(error_unit,*) 'Try reducing the number of MPI tasks...' 
-+ call MPI_ABORT(int(mpi_comm_world,kind=MPI_KIND), 1_MPI_KIND, mpierr) -+ endif -+#else /* WITH_MPI */ -+ na_rows = na -+ na_cols = na -+#endif /* WITH_MPI */ -+ -+ end subroutine -+ -+ !c> void set_up_blacs_descriptor_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nblk, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE_PTR na_rows, TEST_C_INT_TYPE_PTR na_cols, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE my_blacs_ctxt, -+ !c> TEST_C_INT_TYPE_PTR info); -+ subroutine set_up_blacs_descriptor_f(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, na_rows, & -+ na_cols, sc_desc, & -+ my_blacs_ctxt, info) & -+ bind(C, name="set_up_blacs_descriptor_f") -+ -+ use iso_c_binding -+ implicit none -+ -+ -+ TEST_INT_TYPE, value :: na, nblk, my_prow, my_pcol, np_rows, & -+ np_cols, my_blacs_ctxt -+ TEST_INT_TYPE :: na_rows, na_cols, info, sc_desc(1:9) -+ -+ call set_up_blacs_descriptor(na, nblk, my_prow, my_pcol, & -+ np_rows, np_cols, na_rows, & -+ na_cols, sc_desc, my_blacs_ctxt, info) -+ -+ -+ end subroutine -+ -+ -+ function index_l2g(idx_loc, nblk, iproc, nprocs) result(indexl2g) -+ use precision_for_tests -+ implicit none -+ TEST_INT_TYPE :: indexl2g -+ TEST_INT_TYPE :: idx_loc, nblk, iproc, nprocs -+ indexl2g = nprocs * nblk * ((idx_loc-1) / nblk) + mod(idx_loc-1,nblk) + mod(nprocs+iproc, nprocs)*nblk + 1 -+ return -+ end function -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_check_correctness.F90 elpa-2020.11.001_ok/examples/shared/test_check_correctness.F90 ---- elpa-2020.11.001/examples/shared/test_check_correctness.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_check_correctness.F90 2021-02-02 12:54:50.054408000 +0100 -@@ -0,0 +1,156 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! 
the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. Marek, MPCDF -+#include "config-f90.h" -+ -+module test_check_correctness -+ use test_util -+ -+ interface check_correctness_evp_numeric_residuals -+ module procedure check_correctness_evp_numeric_residuals_complex_double -+ module procedure check_correctness_evp_numeric_residuals_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_evp_numeric_residuals_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_evp_numeric_residuals_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_evp_numeric_residuals_ss -+! module procedure check_correctness_evp_numeric_residuals_ss_complex_double -+ module procedure check_correctness_evp_numeric_residuals_ss_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_evp_numeric_residuals_ss_real_single -+#endif -+! #ifdef WANT_SINGLE_PRECISION_COMPLEX -+! module procedure check_correctness_evp_numeric_residuals_ss_complex_single -+! #endif -+ end interface -+ -+ interface check_correctness_eigenvalues_toeplitz -+ module procedure check_correctness_eigenvalues_toeplitz_complex_double -+ module procedure check_correctness_eigenvalues_toeplitz_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_eigenvalues_toeplitz_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_eigenvalues_toeplitz_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_eigenvalues_frank -+ module procedure check_correctness_eigenvalues_frank_complex_double -+ module procedure check_correctness_eigenvalues_frank_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_eigenvalues_frank_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_eigenvalues_frank_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_cholesky -+ module procedure check_correctness_cholesky_complex_double -+ module procedure check_correctness_cholesky_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_cholesky_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_cholesky_complex_single -+#endif -+ end interface -+ -+ interface check_correctness_hermitian_multiply -+ module procedure check_correctness_hermitian_multiply_complex_double -+ module procedure check_correctness_hermitian_multiply_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure check_correctness_hermitian_multiply_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure check_correctness_hermitian_multiply_complex_single -+#endif -+ end interface -+ -+ -+ contains -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef DOUBLE_PRECISION -+#undef 
REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_check_correctness_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+#include "../../src/general/prow_pcol.F90" -+#include "../../src/general/map_global_to_local.F90" -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_check_correctness_template.F90 elpa-2020.11.001_ok/examples/shared/test_check_correctness_template.F90 ---- elpa-2020.11.001/examples/shared/test_check_correctness_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_check_correctness_template.F90 2021-02-02 12:54:50.053371000 +0100 -@@ -0,0 +1,1134 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. 
Marek, MPCDF -+ -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE lik -+#define TEST_C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE ik -+#define TEST_C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#endif -+ -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE lik -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE ik -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#endif -+ -+#if REALCASE == 1 -+ function check_correctness_evp_numeric_residuals_ss_real_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) result(status) -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ use precision_for_tests -+ use iso_c_binding -+ implicit none -+#include "../../src/general/precision_kinds.F90" -+ integer(kind=BLAS_KIND) :: status, na_cols, na_rows -+ integer(kind=BLAS_KIND), intent(in) :: na, nev, nblk, myid, np_rows, np_cols, my_prow, my_pcol -+ real(kind=rk), intent(in) :: as(:,:) -+ real(kind=rk) :: tmpr -+ complex(kind=rck), intent(in) :: z(:,:) -+ real(kind=rk) :: ev(:) -+ complex(kind=rck), dimension(size(as,dim=1),size(as,dim=2)) :: tmp1, tmp2 -+ complex(kind=rck) :: xc -+ -+ complex(kind=rck), allocatable :: as_complex(:,:) -+ -+ integer(kind=BLAS_KIND) :: sc_desc(:) -+ -+ integer(kind=BLAS_KIND) :: i, j, rowLocal, colLocal -+ integer(kind=c_int) :: row_Local, col_Local -+ real(kind=rck) :: err, errmax -+ -+ integer :: mpierr -+ -+ ! tolerance for the residual test for different math type/precision setups -+ real(kind=rk), parameter :: tol_res_real_double = 5e-4_rk -+ real(kind=rk), parameter :: tol_res_real_single = 3e-2_rk -+ real(kind=rk), parameter :: tol_res_complex_double = 5e-12_rk -+ real(kind=rk), parameter :: tol_res_complex_single = 3e-2_rk -+ real(kind=rk) :: tol_res = tol_res_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ ! precision of generalized problem is lower -+ real(kind=rk), parameter :: generalized_penalty = 10.0_rk -+ -+ ! tolerance for the orthogonality test for different math type/precision setups -+! real(kind=rk), parameter :: tol_orth_real_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_real_double = 5e-4_rk -+ real(kind=rk), parameter :: tol_orth_real_single = 9e-2_rk -+ real(kind=rk), parameter :: tol_orth_complex_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_complex_single = 9e-3_rk -+ real(kind=rk), parameter :: tol_orth = tol_orth_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ -+ complex(kind=rck), parameter :: CZERO = (0.0_rck,0.0_rck), CONE = (1.0_rck,0.0_rck) -+ -+ -+ status = 0 -+ ! Setup complex matrices and eigenvalues -+ na_rows = size(as,dim=1) -+ na_cols = size(as,dim=2) -+ -+ allocate(as_complex(na_rows,na_cols)) -+ do j=1, na_cols -+ do i=1,na_rows -+#ifdef DOUBLE_PRECISION_REAL -+ as_complex(i,j) = dcmplx(as(i,j),0.0_rk) -+#else -+ as_complex(i,j) = cmplx(as(i,j),0.0_rk) -+#endif -+ enddo -+ enddo -+ -+ ! 1. Residual (maximum of || A*Zi - Zi*EVi ||) -+ -+ ! 
tmp1 = Zi*EVi -+ tmp1(:,:) = z(:,:) -+ do i=1,nev -+#ifdef DOUBLE_PRECISION_REAL -+ xc = dcmplx(0.0_rk,ev(i)) -+#else -+ xc = cmplx(0.0_rk,ev(i)) -+#endif -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call pzscal(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#else -+ call pcscal(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call zscal(int(na,kind=BLAS_KIND), xc, tmp1(:,i), 1_BLAS_KIND) -+#else -+ call cscal(int(na,kind=BLAS_KIND), xc, tmp1(:,i), 1_BLAS_KIND) -+#endif -+#endif /* WITH_MPI */ -+ enddo -+ -+ ! normal eigenvalue problem .. no need to multiply -+ tmp2(:,:) = tmp1(:,:) -+ -+ ! tmp1 = A * Z -+ ! as is original stored matrix, Z are the EVs -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZGEMM('N', 'N', int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, as_complex, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call PCGEMM('N', 'N', int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, as_complex, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call ZGEMM('N','N',int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), CONE, & -+ as_complex, int(na,kind=BLAS_KIND), z,int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND) ) -+#else -+ call CGEMM('N','N', int(na,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), CONE, & -+ as_complex, int(na,kind=BLAS_KIND), z, int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND) ) -+#endif -+#endif /* WITH_MPI */ -+ -+ ! tmp1 = A*Zi - Zi*EVi -+ tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+ ! Get maximum norm of columns of tmp1 -+ errmax = 0.0_rk -+ -+ do i=1,nev -+ xc = (0.0_rk,0.0_rk) -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZDOTC(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, & -+ 1_BLAS_KIND, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#else -+ call PCDOTC(int(na,kind=BLAS_KIND), xc, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, & -+ 1_BLAS_KIND, tmp1, 1_BLAS_KIND, int(i,kind=BLAS_KIND), sc_desc, 1_BLAS_KIND) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ xc = ZDOTC(int(na,kind=BLAS_KIND) ,tmp1, 1_BLAS_KIND, tmp1, 1_BLAS_KIND) -+#else -+ xc = CDOTC(int(na,kind=BLAS_KIND) ,tmp1, 1_BLAS_KIND, tmp1, 1_BLAS_KIND) -+#endif -+#endif /* WITH_MPI */ -+ errmax = max(errmax, sqrt(real(xc,kind=REAL_DATATYPE))) -+ enddo -+ -+ ! Get maximum error norm over all processors -+ err = errmax -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'%Results of numerical residual checks, using complex arithmetic:' -+ if (myid==0) print *,'%Error Residual :',errmax -+ if (nev .ge. 2) then -+ if (errmax .gt. tol_res .or. errmax .eq. 0.0_rk) then -+ status = 1 -+ endif -+ else -+ if (errmax .gt. tol_res) then -+ status = 1 -+ endif -+ endif -+ -+ ! 2. 
Eigenvector orthogonality -+ tmp2(:,:) = z(:,:) -+ tmp1 = 0 -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZGEMM('C', 'N', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, z, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call PCGEMM('C', 'N', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND), & -+ CONE, z, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, CZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#endif -+ -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call ZGEMM('C','N', int(nev,kind=BLAS_KIND) , int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND),CONE, z, & -+ int(na,kind=BLAS_KIND), tmp2, int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND)) -+#else -+ call CGEMM('C','N', int(nev,kind=BLAS_KIND) , int(nev,kind=BLAS_KIND), int(na,kind=BLAS_KIND),CONE, z, & -+ int(na,kind=BLAS_KIND), tmp2, int(na,kind=BLAS_KIND), CZERO, tmp1, int(na,kind=BLAS_KIND)) -+#endif -+#endif /* WITH_MPI */ -+ ! First check, whether the elements on diagonal are 1 .. "normality" of the vectors -+ err = 0.0_rk -+ do i=1, nev -+ if (map_global_array_index_to_local_index(int(i,kind=c_int), int(i,kind=c_int), row_Local, col_Local, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int)) ) then -+ rowLocal = int(row_Local,kind=INT_TYPE) -+ colLocal = int(col_Local,kind=INT_TYPE) -+ err = max(err, abs(tmp1(rowLocal,colLocal) - CONE)) -+ endif -+ end do -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'%Maximal error in eigenvector lengths:',errmax -+ -+ ! Second, find the maximal error in the whole Z**T * Z matrix (its diference from identity matrix) -+ ! Initialize tmp2 to unit matrix -+ tmp2 = 0 -+#ifdef WITH_MPI -+#ifdef DOUBLE_PRECISION_REAL -+ call PZLASET('A', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), CZERO, CONE, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call PCLASET('A', int(nev,kind=BLAS_KIND), int(nev,kind=BLAS_KIND), CZERO, CONE, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#endif -+#else /* WITH_MPI */ -+#ifdef DOUBLE_PRECISION_REAL -+ call ZLASET('A',int(nev,kind=BLAS_KIND) ,int(nev,kind=BLAS_KIND) ,CZERO, CONE, tmp2, int(na,kind=BLAS_KIND)) -+#else -+ call CLASET('A',int(nev,kind=BLAS_KIND) ,int(nev,kind=BLAS_KIND) ,CZERO, CONE, tmp2, int(na,kind=BLAS_KIND)) -+#endif -+#endif /* WITH_MPI */ -+ -+ ! ! tmp1 = Z**T * Z - Unit Matrix -+ tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+ ! Get maximum error (max abs value in tmp1) -+ err = maxval(abs(tmp1)) -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, int(MPI_COMM_WORLD,kind=MPI_KIND), mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'%Error Orthogonality:',errmax -+ -+ if (nev .ge. 2) then -+ if (errmax .gt. tol_orth .or. errmax .eq. 0.0_rk) then -+ status = 1 -+ endif -+ else -+ if (errmax .gt. 
tol_orth) then -+ status = 1 -+ endif -+ endif -+ -+ deallocate(as_complex) -+ end function -+ -+#endif /* REALCASE */ -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_ss_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> double *as, complex double *z, double *ev, TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_ss_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> float *as, complex float *z, float *ev, TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* REALCASE */ -+ -+#if REALCASE == 1 -+function check_correctness_evp_numeric_residuals_ss_real_& -+&PRECISION& -+&_f (na, nev, na_rows, na_cols, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) result(status) & -+ bind(C,name="check_correctness_evp_numeric_residuals_ss_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ -+ use precision_for_tests -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, value :: na, nev, myid, na_rows, na_cols, nblk, np_rows, np_cols, my_prow, my_pcol -+ real(kind=rck) :: as(1:na_rows,1:na_cols) -+ complex(kind=rck) :: z(1:na_rows,1:na_cols) -+ real(kind=rck) :: ev(1:na) -+ TEST_INT_TYPE :: sc_desc(1:9) -+ -+ status = check_correctness_evp_numeric_residuals_ss_real_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ end function -+#endif /* REALCASE */ -+ -+function check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs) result(status) -+ -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, intent(in) :: na, nev, nblk, myid, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck), intent(in) :: as(:,:), z(:,:) -+ MATH_DATATYPE(kind=rck), intent(in), optional :: bs(:,:) -+ real(kind=rk) :: ev(:) -+ MATH_DATATYPE(kind=rck), dimension(size(as,dim=1),size(as,dim=2)) :: tmp1, tmp2 -+ MATH_DATATYPE(kind=rck) :: xc -+ -+ TEST_INT_TYPE :: sc_desc(:) -+ -+ TEST_INT_TYPE :: i, rowLocal, colLocal -+ integer(kind=c_int) :: row_Local, col_Local -+ real(kind=rck) :: err, errmax -+ -+ TEST_INT_MPI_TYPE :: mpierr -+ -+! tolerance for the residual test for different math type/precision setups -+ real(kind=rk), parameter :: tol_res_real_double = 5e-12_rk -+ real(kind=rk), parameter :: tol_res_real_single = 3e-2_rk -+ real(kind=rk), parameter :: tol_res_complex_double = 5e-12_rk -+ real(kind=rk), parameter :: tol_res_complex_single = 3e-2_rk -+ real(kind=rk) :: tol_res = tol_res_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ ! precision of generalized problem is lower -+ real(kind=rk), parameter :: generalized_penalty = 10.0_rk -+ -+ ! 
tolerance for the orthogonality test for different math type/precision setups -+ real(kind=rk), parameter :: tol_orth_real_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_real_single = 9e-2_rk -+ real(kind=rk), parameter :: tol_orth_complex_double = 5e-11_rk -+ real(kind=rk), parameter :: tol_orth_complex_single = 9e-3_rk -+ real(kind=rk), parameter :: tol_orth = tol_orth_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION -+ -+ if (present(bs)) then -+ tol_res = generalized_penalty * tol_res -+ endif -+ status = 0 -+ -+ ! 1. Residual (maximum of || A*Zi - Zi*EVi ||) -+ -+! tmp1 = Zi*EVi -+ tmp1(:,:) = z(:,:) -+ do i=1,nev -+ xc = ev(i) -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &scal(na, xc, tmp1, 1_BLAS_KIND, i, sc_desc, 1_BLAS_KIND) -+#else /* WITH_MPI */ -+ call BLAS_CHAR& -+ &scal(na, xc, tmp1(:,i), 1_BLAS_KIND) -+#endif /* WITH_MPI */ -+ enddo -+ -+ ! for generalized EV problem, multiply by bs as well -+ ! tmp2 = B * tmp1 -+ if(present(bs)) then -+#ifdef WITH_MPI -+ call scal_PRECISION_GEMM('N', 'N', na, nev, na, ONE, bs, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call PRECISION_GEMM('N','N',na,nev,na,ONE,bs,na,tmp1,na,ZERO,tmp2,na) -+#endif /* WITH_MPI */ -+ else -+ ! normal eigenvalue problem .. no need to multiply -+ tmp2(:,:) = tmp1(:,:) -+ end if -+ -+ ! tmp1 = A * Z -+ ! as is original stored matrix, Z are the EVs -+#ifdef WITH_MPI -+ call scal_PRECISION_GEMM('N', 'N', na, nev, na, ONE, as, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call PRECISION_GEMM('N','N',na,nev,na,ONE,as,na,z,na,ZERO,tmp1,na) -+#endif /* WITH_MPI */ -+ -+ ! tmp1 = A*Zi - Zi*EVi -+ tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+ ! Get maximum norm of columns of tmp1 -+ errmax = 0.0_rk -+ -+ do i=1,nev -+#if REALCASE == 1 -+ err = 0.0_rk -+#ifdef WITH_MPI -+ call scal_PRECISION_NRM2(na, err, tmp1, 1_BLAS_KIND, i, sc_desc, 1_BLAS_KIND) -+#else /* WITH_MPI */ -+ err = PRECISION_NRM2(na,tmp1(1,i),1_BLAS_KIND) -+#endif /* WITH_MPI */ -+ errmax = max(errmax, err) -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+ xc = 0 -+#ifdef WITH_MPI -+ call scal_PRECISION_DOTC(na, xc, tmp1, 1_BLAS_KIND, i, sc_desc, & -+ 1_BLAS_KIND, tmp1, 1_BLAS_KIND, i, sc_desc, 1_BLAS_KIND) -+#else /* WITH_MPI */ -+ xc = PRECISION_DOTC(na,tmp1,1_BLAS_KIND,tmp1,1_BLAS_KIND) -+#endif /* WITH_MPI */ -+ errmax = max(errmax, sqrt(real(xc,kind=REAL_DATATYPE))) -+#endif /* COMPLEXCASE */ -+ enddo -+ -+ ! Get maximum error norm over all processors -+ err = errmax -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'Results of numerical residual checks:' -+ if (myid==0) print *,'Error Residual :',errmax -+ if (nev .ge. 2) then -+ if (errmax .gt. tol_res .or. errmax .eq. 0.0_rk) then -+ status = 1 -+ endif -+ else -+ if (errmax .gt. tol_res) then -+ status = 1 -+ endif -+ endif -+ -+ ! 2. Eigenvector orthogonality -+ if(present(bs)) then -+ !for the generalized EVP, the eigenvectors should be B-orthogonal, not orthogonal -+ ! 
tmp2 = B * Z -+ tmp2(:,:) = 0.0_rck -+#ifdef WITH_MPI -+ call scal_PRECISION_GEMM('N', 'N', na, nev, na, ONE, bs, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call PRECISION_GEMM('N','N', na, nev, na, ONE, bs, na, z, na, ZERO, tmp2, na) -+#endif /* WITH_MPI */ -+ -+ else -+ tmp2(:,:) = z(:,:) -+ endif -+ ! tmp1 = Z**T * tmp2 -+ ! actually tmp1 = Z**T * Z for standard case and tmp1 = Z**T * B * Z for generalized -+ tmp1 = 0 -+#ifdef WITH_MPI -+ call scal_PRECISION_GEMM(BLAS_TRANS_OR_CONJ, 'N', nev, nev, na, ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, & -+ tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call PRECISION_GEMM(BLAS_TRANS_OR_CONJ,'N',nev,nev,na,ONE,z,na,tmp2,na,ZERO,tmp1,na) -+#endif /* WITH_MPI */ -+ ! First check, whether the elements on diagonal are 1 .. "normality" of the vectors -+ err = 0.0_rk -+ do i=1, nev -+ if (map_global_array_index_to_local_index(int(i,kind=c_int), int(i,kind=c_int) , row_Local, col_Local, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) )) then -+ rowLocal = int(row_Local,kind=INT_TYPE) -+ colLocal = int(col_Local,kind=INT_TYPE) -+ err = max(err, abs(tmp1(rowLocal,colLocal) - 1.0_rk)) -+ endif -+ end do -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'Maximal error in eigenvector lengths:',errmax -+ -+ ! Second, find the maximal error in the whole Z**T * Z matrix (its diference from identity matrix) -+ ! Initialize tmp2 to unit matrix -+ tmp2 = 0 -+#ifdef WITH_MPI -+ call scal_PRECISION_LASET('A', nev, nev, ZERO, ONE, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call PRECISION_LASET('A',nev,nev,ZERO,ONE,tmp2,na) -+#endif /* WITH_MPI */ -+ -+ ! ! tmp1 = Z**T * Z - Unit Matrix -+ tmp1(:,:) = tmp1(:,:) - tmp2(:,:) -+ -+ ! Get maximum error (max abs value in tmp1) -+ err = maxval(abs(tmp1)) -+#ifdef WITH_MPI -+ call mpi_allreduce(err, errmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ errmax = err -+#endif /* WITH_MPI */ -+ if (myid==0) print *,'Error Orthogonality:',errmax -+ -+ if (nev .ge. 2) then -+ if (errmax .gt. tol_orth .or. errmax .eq. 0.0_rk) then -+ status = 1 -+ endif -+ else -+ if (errmax .gt. 
tol_orth) then -+ status = 1 -+ endif -+ endif -+ end function -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> double *as, double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> float *as, float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex double *as, complex double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_numeric_residuals_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex float *as, complex float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* COMPLEXCASE */ -+ -+function check_correctness_evp_numeric_residuals_& -+&MATH_DATATYPE& -+&_& -+&PRECISION& -+&_f (na, nev, na_rows, na_cols, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) result(status) & -+ bind(C,name="check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ -+ use precision_for_tests -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, value :: na, nev, myid, na_rows, na_cols, nblk, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck) :: as(1:na_rows,1:na_cols), z(1:na_rows,1:na_cols) -+ real(kind=rck) :: ev(1:na) -+ TEST_INT_TYPE :: sc_desc(1:9) -+ -+ status = check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol) -+ -+ end function -+ -+!---- variant for the generalized eigenproblem -+!---- unlike in Fortran, we cannot use optional parameter -+!---- we thus define a different function -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> double *as, double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> double *bs); -+#else -+ !c> 
TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> float *as, float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, -+ !c> TEST_C_INT_TYPE my_pcol, -+ !c> float *bs); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex double *as, complex double *z, double *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> complex double *bs); -+#else -+ !c> TEST_C_INT_TYPE check_correctness_evp_gen_numeric_residuals_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE nev, -+ !c> TEST_C_INT_TYPE na_rows, TEST_C_INT_TYPE na_cols, -+ !c> complex float *as, complex float *z, float *ev, -+ !c> TEST_C_INT_TYPE sc_desc[9], -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE myid, -+ !c> TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol, -+ !c> complex float *bs); -+#endif -+#endif /* COMPLEXCASE */ -+ -+function check_correctness_evp_gen_numeric_residuals_& -+&MATH_DATATYPE& -+&_& -+&PRECISION& -+&_f (na, nev, na_rows, na_cols, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs) result(status) & -+ bind(C,name="check_correctness_evp_gen_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ -+ use iso_c_binding -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, value :: na, nev, myid, na_rows, na_cols, nblk, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck) :: as(1:na_rows,1:na_cols), z(1:na_rows,1:na_cols), bs(1:na_rows,1:na_cols) -+ real(kind=rck) :: ev(1:na) -+ TEST_INT_TYPE :: sc_desc(1:9) -+ -+ status = check_correctness_evp_numeric_residuals_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, nev, as, z, ev, sc_desc, nblk, myid, np_rows, np_cols, my_prow, my_pcol, bs) -+ -+ end function -+ -+ !----------------------------------------------------------------------------------------------------------- -+ -+ function check_correctness_eigenvalues_toeplitz_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, diagonalElement, subdiagonalElement, ev, z, myid) result(status) -+ use iso_c_binding -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status, ii, j, myid -+ TEST_INT_TYPE, intent(in) :: na -+ real(kind=rck) :: diagonalElement, subdiagonalElement -+ real(kind=rck) :: ev_analytic(na), ev(na) -+ MATH_DATATYPE(kind=rck) :: z(:,:) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ real(kind=rck), parameter :: pi = 3.141592653589793238462643383279_c_double -+#else -+ real(kind=rck), parameter :: pi = 3.1415926535897932_c_float -+#endif -+ real(kind=rck) :: tmp, maxerr -+ TEST_INT_TYPE :: loctmp -+ status = 0 -+ -+ ! 
analytic solution -+ do ii=1, na -+ ev_analytic(ii) = diagonalElement + 2.0_rk * & -+ subdiagonalElement *cos( pi*real(ii,kind=rk)/ & -+ real(na+1,kind=rk) ) -+ enddo -+ -+ ! sort analytic solution: -+ -+ ! this hack is neither elegant, nor optimized: for huge matrixes it might be expensive -+ ! a proper sorting algorithmus might be implemented here -+ -+ tmp = minval(ev_analytic) -+ loctmp = minloc(ev_analytic, 1) -+ -+ ev_analytic(loctmp) = ev_analytic(1) -+ ev_analytic(1) = tmp -+ do ii=2, na -+ tmp = ev_analytic(ii) -+ do j= ii, na -+ if (ev_analytic(j) .lt. tmp) then -+ tmp = ev_analytic(j) -+ loctmp = j -+ endif -+ enddo -+ ev_analytic(loctmp) = ev_analytic(ii) -+ ev_analytic(ii) = tmp -+ enddo -+ -+ ! compute a simple error max of eigenvalues -+ maxerr = 0.0 -+ maxerr = maxval( (ev(:) - ev_analytic(:))/ev_analytic(:) , 1) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ if (maxerr .gt. 8.e-13_c_double .or. maxerr .eq. 0.0_c_double) then -+#else -+ if (maxerr .gt. 8.e-4_c_float .or. maxerr .eq. 0.0_c_float) then -+#endif -+ status = 1 -+ if (myid .eq. 0) then -+ print *,"Result of Toeplitz matrix test: " -+ print *,"Eigenvalues differ from analytic solution: maxerr = ",maxerr -+ endif -+ endif -+ -+ if (status .eq. 0) then -+ if (myid .eq. 0) then -+ print *,"Result of Toeplitz matrix test: test passed" -+ print *,"Eigenvalues differ from analytic solution: maxerr = ",maxerr -+ endif -+ endif -+ end function -+ -+ function check_correctness_cholesky_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, a, as, na_rows, sc_desc, myid) result(status) -+ use precision_for_tests -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, intent(in) :: na, myid, na_rows -+ -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:), as(:,:) -+ MATH_DATATYPE(kind=rck), dimension(size(as,dim=1),size(as,dim=2)) :: tmp1, tmp2 -+#if COMPLEXCASE == 1 -+ ! needed for [z,c]lange from scalapack -+ real(kind=rk), dimension(2*size(as,dim=1),size(as,dim=2)) :: tmp1_real -+#endif -+ real(kind=rk) :: norm, normmax -+ -+ TEST_INT_TYPE :: sc_desc(:) -+ real(kind=rck) :: err, errmax -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ status = 0 -+ tmp1(:,:) = 0.0_rck -+ -+ -+#if REALCASE == 1 -+ ! tmp1 = a**T -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tran(na, na, 1.0_rck, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ 0.0_rck, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(a) -+#endif /* WITH_MPI */ -+#endif /* REALCASE == 1 */ -+ -+#if COMPLEXCASE == 1 -+ ! tmp1 = a**H -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tranc(na, na, ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(conjg(a)) -+#endif /* WITH_MPI */ -+#endif /* COMPLEXCASE == 1 */ -+ -+ ! tmp2 = a**T * a -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ a, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ call BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, na, a, na, ZERO, tmp2, na) -+#endif /* WITH_MPI */ -+ -+ ! 
compare tmp2 with original matrix -+ tmp2(:,:) = tmp2(:,:) - as(:,:) -+ -+#ifdef WITH_MPI -+ norm = p& -+ &BLAS_CHAR& -+ &lange("M",na, na, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#else /* WITH_MPI */ -+ norm = BLAS_CHAR& -+ &lange("M", na, na, tmp2, na_rows, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#endif /* WITH_MPI */ -+ -+ -+#ifdef WITH_MPI -+ call mpi_allreduce(norm, normmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ normmax = norm -+#endif /* WITH_MPI */ -+ -+ if (myid .eq. 0) then -+ print *," Maximum error of result: ", normmax -+ endif -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+! if (normmax .gt. 5e-12_rk8 .or. normmax .eq. 0.0_rk8) then -+ if (normmax .gt. 5e-12_rk8) then -+ status = 1 -+ endif -+#else -+! if (normmax .gt. 5e-4_rk4 .or. normmax .eq. 0.0_rk4) then -+ if (normmax .gt. 5e-4_rk4 ) then -+ status = 1 -+ endif -+#endif -+#endif -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+! if (normmax .gt. 5e-11_rk8 .or. normmax .eq. 0.0_rk8) then -+ if (normmax .gt. 5e-11_rk8 ) then -+ status = 1 -+ endif -+#else -+! if (normmax .gt. 5e-3_rk4 .or. normmax .eq. 0.0_rk4) then -+ if (normmax .gt. 5e-3_rk4) then -+ status = 1 -+ endif -+#endif -+#endif -+ end function -+ -+ function check_correctness_hermitian_multiply_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, a, b, c, na_rows, sc_desc, myid) result(status) -+ use precision_for_tests -+ use tests_blas_interfaces -+ use tests_scalapack_interfaces -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE :: status -+ TEST_INT_TYPE, intent(in) :: na, myid, na_rows -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:), b(:,:), c(:,:) -+ MATH_DATATYPE(kind=rck), dimension(size(a,dim=1),size(a,dim=2)) :: tmp1, tmp2 -+#if COMPLEXCASE == 1 -+ real(kind=rk), dimension(2*size(a,dim=1),size(a,dim=2)) :: tmp1_real -+#endif -+ real(kind=rck) :: norm, normmax -+ -+ -+ TEST_INT_TYPE :: sc_desc(:) -+ real(kind=rck) :: err, errmax -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ status = 0 -+ tmp1(:,:) = ZERO -+ -+#if REALCASE == 1 -+ ! tmp1 = a**T -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tran(na, na, ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(a) -+#endif /* WITH_MPI */ -+ -+#endif /* REALCASE == 1 */ -+ -+#if COMPLEXCASE == 1 -+ ! tmp1 = a**H -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &tranc(na, na, ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, ZERO, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else /* WITH_MPI */ -+ tmp1 = transpose(conjg(a)) -+#endif /* WITH_MPI */ -+#endif /* COMPLEXCASE == 1 */ -+ -+ ! tmp2 = tmp1 * b -+#ifdef WITH_MPI -+ call p& -+ &BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, b, 1_BLAS_KIND, 1_BLAS_KIND, & -+ sc_desc, ZERO, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) -+#else -+ call BLAS_CHAR& -+ &gemm("N","N", na, na, na, ONE, tmp1, na, b, na, ZERO, tmp2, na) -+#endif -+ -+ ! compare tmp2 with c -+ tmp2(:,:) = tmp2(:,:) - c(:,:) -+ -+#ifdef WITH_MPI -+ ! dirty hack: the last argument should be a real array, but is not referenced -+ ! if mode = "M", thus we get away with a complex argument -+ norm = p& -+ &BLAS_CHAR& -+ &lange("M", na, na, tmp2, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#else /* WITH_MPI */ -+ ! 
dirty hack: the last argument should be a real array, but is not referenced -+ ! if mode = "M", thus we get away with a complex argument -+ norm = BLAS_CHAR& -+ &lange("M", na, na, tmp2, na_rows, & -+#if COMPLEXCASE == 1 -+ tmp1_real) -+#else -+ tmp1) -+#endif -+#endif /* WITH_MPI */ -+ -+#ifdef WITH_MPI -+ call mpi_allreduce(norm, normmax, 1_MPI_KIND, MPI_REAL_PRECISION, MPI_MAX, MPI_COMM_WORLD, mpierr) -+#else /* WITH_MPI */ -+ normmax = norm -+#endif /* WITH_MPI */ -+ -+ if (myid .eq. 0) then -+ print *," Maximum error of result: ", normmax -+ endif -+ -+#ifdef DOUBLE_PRECISION_REAL -+ if (normmax .gt. 5e-11_rk8 ) then -+ status = 1 -+ endif -+#else -+ if (normmax .gt. 5e-3_rk4 ) then -+ status = 1 -+ endif -+#endif -+ -+#ifdef DOUBLE_PRECISION_COMPLEX -+ if (normmax .gt. 5e-11_rk8 ) then -+ status = 1 -+ endif -+#else -+ if (normmax .gt. 5e-3_rk4 ) then -+ status = 1 -+ endif -+#endif -+ end function -+ -+ function check_correctness_eigenvalues_frank_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, ev, z, myid) result(status) -+ use iso_c_binding -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE :: status, i, j, myid -+ TEST_INT_TYPE, intent(in) :: na -+ real(kind=rck) :: ev_analytic(na), ev(na) -+ MATH_DATATYPE(kind=rck) :: z(:,:) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ real(kind=rck), parameter :: pi = 3.141592653589793238462643383279_c_double -+#else -+ real(kind=rck), parameter :: pi = 3.1415926535897932_c_float -+#endif -+ real(kind=rck) :: tmp, maxerr -+ TEST_INT_TYPE :: loctmp -+ status = 0 -+ -+ ! analytic solution -+ do i = 1, na -+ j = na - i -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ ev_analytic(i) = pi * (2.0_c_double * real(j,kind=c_double) + 1.0_c_double) / & -+ (2.0_c_double * real(na,kind=c_double) + 1.0_c_double) -+ ev_analytic(i) = 0.5_c_double / (1.0_c_double - cos(ev_analytic(i))) -+#else -+ ev_analytic(i) = pi * (2.0_c_float * real(j,kind=c_float) + 1.0_c_float) / & -+ (2.0_c_float * real(na,kind=c_float) + 1.0_c_float) -+ ev_analytic(i) = 0.5_c_float / (1.0_c_float - cos(ev_analytic(i))) -+#endif -+ enddo -+ -+ ! sort analytic solution: -+ -+ ! this hack is neither elegant, nor optimized: for huge matrixes it might be expensive -+ ! a proper sorting algorithmus might be implemented here -+ -+ tmp = minval(ev_analytic) -+ loctmp = minloc(ev_analytic, 1) -+ -+ ev_analytic(loctmp) = ev_analytic(1) -+ ev_analytic(1) = tmp -+ do i=2, na -+ tmp = ev_analytic(i) -+ do j= i, na -+ if (ev_analytic(j) .lt. tmp) then -+ tmp = ev_analytic(j) -+ loctmp = j -+ endif -+ enddo -+ ev_analytic(loctmp) = ev_analytic(i) -+ ev_analytic(i) = tmp -+ enddo -+ -+ ! compute a simple error max of eigenvalues -+ maxerr = 0.0 -+ maxerr = maxval( (ev(:) - ev_analytic(:))/ev_analytic(:) , 1) -+ -+#if defined(DOUBLE_PRECISION_REAL) || defined(DOUBLE_PRECISION_COMPLEX) -+ if (maxerr .gt. 8.e-13_c_double) then -+#else -+ if (maxerr .gt. 8.e-4_c_float) then -+#endif -+ status = 1 -+ if (myid .eq. 0) then -+ print *,"Result of Frank matrix test: " -+ print *,"Eigenvalues differ from analytic solution: maxerr = ",maxerr -+ endif -+ endif -+ end function -+ -+! 
vim: syntax=fortran -diff -ruN elpa-2020.11.001/examples/shared/test_output_type.F90 elpa-2020.11.001_ok/examples/shared/test_output_type.F90 ---- elpa-2020.11.001/examples/shared/test_output_type.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_output_type.F90 2021-02-02 12:54:50.049335000 +0100 -@@ -0,0 +1,11 @@ -+#include "config-f90.h" -+ -+module test_output_type -+ -+ type :: output_t -+ logical :: eigenvectors -+ logical :: eigenvalues -+ end type -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_precision_kinds.F90 elpa-2020.11.001_ok/examples/shared/test_precision_kinds.F90 ---- elpa-2020.11.001/examples/shared/test_precision_kinds.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_precision_kinds.F90 2021-02-02 12:54:50.048528000 +0100 -@@ -0,0 +1,25 @@ -+#ifdef REALCASE -+#ifdef DOUBLE_PRECISION -+ integer, parameter :: rk = C_DOUBLE -+ integer, parameter :: rck = C_DOUBLE -+#endif -+#ifdef SINGLE_PRECISION -+ integer, parameter :: rk = C_FLOAT -+ integer, parameter :: rck = C_FLOAT -+#endif -+ real(kind=rck), parameter :: ZERO=0.0_rk, ONE = 1.0_rk -+#endif -+ -+#ifdef COMPLEXCASE -+#ifdef DOUBLE_PRECISION -+ integer, parameter :: rk = C_DOUBLE -+ integer, parameter :: ck = C_DOUBLE_COMPLEX -+ integer, parameter :: rck = C_DOUBLE_COMPLEX -+#endif -+#ifdef SINGLE_PRECISION -+ integer, parameter :: rk = C_FLOAT -+ integer, parameter :: ck = C_FLOAT_COMPLEX -+ integer, parameter :: rck = C_FLOAT_COMPLEX -+#endif -+ complex(kind=rck), parameter :: ZERO = (0.0_rk,0.0_rk), ONE = (1.0_rk,0.0_rk) -+#endif -diff -ruN elpa-2020.11.001/examples/shared/test_prepare_matrix.F90 elpa-2020.11.001_ok/examples/shared/test_prepare_matrix.F90 ---- elpa-2020.11.001/examples/shared/test_prepare_matrix.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_prepare_matrix.F90 2021-02-02 12:54:50.056514000 +0100 -@@ -0,0 +1,145 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! 
license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. Marek, MPCDF -+#include "config-f90.h" -+ -+module test_prepare_matrix -+ -+ use precision_for_tests -+ interface prepare_matrix_random -+ module procedure prepare_matrix_random_complex_double -+ module procedure prepare_matrix_random_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_random_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_random_complex_single -+#endif -+ end interface -+ -+ -+ interface prepare_matrix_random_spd -+ module procedure prepare_matrix_random_spd_complex_double -+ module procedure prepare_matrix_random_spd_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_random_spd_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_random_spd_complex_single -+#endif -+ end interface -+ -+ -+ interface prepare_matrix_toeplitz -+ module procedure prepare_matrix_toeplitz_complex_double -+ module procedure prepare_matrix_toeplitz_real_double -+ module procedure prepare_matrix_toeplitz_mixed_complex_complex_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_toeplitz_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_toeplitz_complex_single -+ module procedure prepare_matrix_toeplitz_mixed_complex_complex_single -+#endif -+ end interface -+ -+ interface prepare_matrix_frank -+ module procedure prepare_matrix_frank_complex_double -+ module procedure prepare_matrix_frank_real_double -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure prepare_matrix_frank_real_single -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure prepare_matrix_frank_complex_single -+#endif -+ end interface -+ -+ -+ -+ private prows, pcols, map_global_array_index_to_local_index -+ -+ contains -+ -+#include "../../src/general/prow_pcol.F90" -+#include "../../src/general/map_global_to_local.F90" -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef DOUBLE_PRECISION -+#undef REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_prepare_matrix_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_prepare_matrix_template.F90 elpa-2020.11.001_ok/examples/shared/test_prepare_matrix_template.F90 ---- elpa-2020.11.001/examples/shared/test_prepare_matrix_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_prepare_matrix_template.F90 
2021-02-02 12:54:50.043491000 +0100 -@@ -0,0 +1,510 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! Author: A. Marek, MPCDF -+ -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#define TEST_C_INT_TYPE_PTR long int* -+#define TEST_C_INT_TYPE long int -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#define TEST_C_INT_TYPE_PTR int* -+#define TEST_C_INT_TYPE int -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#define TEST_C_INT_MPI_TYPE_PTR long int* -+#define TEST_C_INT_MPI_TYPE long int -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#define TEST_C_INT_MPI_TYPE_PTR int* -+#define TEST_C_INT_MPI_TYPE int -+#endif -+ -+ -+ subroutine prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as, is_skewsymmetric) -+ -+ -+ !use test_util -+ use tests_scalapack_interfaces -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na, sc_desc(:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:), a(:,:), as(:,:) -+ -+#if COMPLEXCASE == 1 -+ real(kind=rk) :: xr(size(a,dim=1), size(a,dim=2)) -+#endif /* COMPLEXCASE */ -+ -+ integer(kind=c_int), allocatable :: iseed(:) -+ integer(kind=c_int) :: n -+ integer(kind=c_int), intent(in), optional :: is_skewsymmetric -+ logical :: skewsymmetric -+ -+ if (present(is_skewsymmetric)) then -+ if (is_skewsymmetric .eq. 1) then -+ skewsymmetric = .true. -+ else -+ skewsymmetric = .false. -+ endif -+ else -+ skewsymmetric = .false. -+ endif -+ -+ ! 
for getting a hermitian test matrix A we get a random matrix Z -+ ! and calculate A = Z + Z**H -+ ! in case of a skewsymmetric matrix A = Z - Z**H -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ call random_seed(size=n) -+ allocate(iseed(n)) -+ iseed(:) = myid -+ call random_seed(put=iseed) -+#if REALCASE == 1 -+ call random_number(z) -+ -+ a(:,:) = z(:,:) -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+ call random_number(xr) -+ -+ z(:,:) = xr(:,:) -+ call RANDOM_NUMBER(xr) -+ z(:,:) = z(:,:) + (0.0_rk,1.0_rk)*xr(:,:) -+ a(:,:) = z(:,:) -+#endif /* COMPLEXCASE */ -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ -+#if REALCASE == 1 -+#ifdef WITH_MPI -+ if (skewsymmetric) then -+ call p& -+ &BLAS_CHAR& -+ &tran(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), -ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**T -+ else -+ call p& -+ &BLAS_CHAR& -+ &tran(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**T -+ endif -+#else /* WITH_MPI */ -+ if (skewsymmetric) then -+ a = a - transpose(z) -+ else -+ a = a + transpose(z) -+ endif -+#endif /* WITH_MPI */ -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef WITH_MPI -+ if (skewsymmetric) then -+ call p& -+ &BLAS_CHAR& -+ &tranc(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), -ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**H -+ else -+ call p& -+ &BLAS_CHAR& -+ &tranc(int(na,kind=BLAS_KIND), int(na,kind=BLAS_KIND), ONE, z, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc, & -+ ONE, a, 1_BLAS_KIND, 1_BLAS_KIND, sc_desc) ! A = A + Z**H -+ endif -+#else /* WITH_MPI */ -+ if (skewsymmetric) then -+ a = a - transpose(conjg(z)) -+ else -+ a = a + transpose(conjg(z)) -+ endif -+#endif /* WITH_MPI */ -+#endif /* COMPLEXCASE */ -+ -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been symmetrized' -+ endif -+ -+ ! 
save original matrix A for later accuracy checks -+ -+ as = a -+ -+ deallocate(iseed) -+ -+ end subroutine -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> void prepare_matrix_random_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> double *a, double *z, double *as); -+#else -+ !c> void prepare_matrix_random_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> float *a, float *z, float *as); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> void prepare_matrix_random_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex double *a, complex double *z, complex double *as); -+#else -+ !c> void prepare_matrix_random_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex float *a, complex float *z, complex float *as); -+#endif -+#endif /* COMPLEXCASE */ -+ -+subroutine prepare_matrix_random_& -+&MATH_DATATYPE& -+&_wrapper_& -+&PRECISION& -+& (na, myid, na_rows, na_cols, sc_desc, a, z, as) & -+ bind(C, name="prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE , value :: myid, na, na_rows, na_cols -+ TEST_INT_TYPE :: sc_desc(1:9) -+ MATH_DATATYPE(kind=rck) :: z(1:na_rows,1:na_cols), a(1:na_rows,1:na_cols), & -+ as(1:na_rows,1:na_cols) -+ call prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as) -+ end subroutine -+ -+!---------------------------------------------------------------------------------------------------------------- -+ -+ subroutine prepare_matrix_random_spd_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ -+ !use test_util -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ TEST_INT_TYPE, intent(in) :: myid, na, sc_desc(:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:), a(:,:), as(:,:) -+ TEST_INT_TYPE, intent(in) :: nblk, np_rows, np_cols, my_prow, my_pcol -+ -+ TEST_INT_TYPE :: ii -+ integer(kind=c_int) :: rowLocal, colLocal -+ -+ -+ call prepare_matrix_random_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as) -+ -+ ! 
hermitian diagonaly dominant matrix => positive definite -+ do ii=1, na -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii,kind=c_int), & -+ rowLocal, colLocal, & -+ int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = & -+ real(a(int(rowLocal,kind=INT_TYPE), int(colLocal,kind=INT_TYPE))) + na + 1 -+ end if -+ end do -+ -+ as = a -+ -+ end subroutine -+ -+#if REALCASE == 1 -+#ifdef DOUBLE_PRECISION_REAL -+ !c> void prepare_matrix_random_spd_real_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> double *a, double *z, double *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> void prepare_matrix_random_spd_real_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> float *a, float *z, float *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, TEST_C_INT_TYPE np_cols, -+ !c> TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* REALCASE */ -+ -+#if COMPLEXCASE == 1 -+#ifdef DOUBLE_PRECISION_COMPLEX -+ !c> void prepare_matrix_random_spd_complex_double_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex double *a, complex double *z, complex double *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#else -+ !c> void prepare_matrix_random_spd_complex_single_f(TEST_C_INT_TYPE na, TEST_C_INT_TYPE myid, TEST_C_INT_TYPE na_rows, -+ !c> TEST_C_INT_TYPE na_cols, TEST_C_INT_TYPE sc_desc[9], -+ !c> complex float *a, complex float *z, complex float *as, -+ !c> TEST_C_INT_TYPE nblk, TEST_C_INT_TYPE np_rows, -+ !c> TEST_C_INT_TYPE np_cols, TEST_C_INT_TYPE my_prow, TEST_C_INT_TYPE my_pcol); -+#endif -+#endif /* COMPLEXCASE */ -+ -+subroutine prepare_matrix_random_spd_& -+&MATH_DATATYPE& -+&_wrapper_& -+&PRECISION& -+& (na, myid, na_rows, na_cols, sc_desc, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) & -+ bind(C, name="prepare_matrix_random_spd_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ &_f") -+ use iso_c_binding -+ -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE , value :: myid, na, na_rows, na_cols -+ TEST_INT_TYPE :: sc_desc(1:9) -+ MATH_DATATYPE(kind=rck) :: z(1:na_rows,1:na_cols), a(1:na_rows,1:na_cols), & -+ as(1:na_rows,1:na_cols) -+ TEST_INT_TYPE , value :: nblk, np_rows, np_cols, my_prow, my_pcol -+ call prepare_matrix_random_spd_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, myid, sc_desc, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ end subroutine -+ -+ -+!---------------------------------------------------------------------------------------------------------------- -+ -+ subroutine prepare_matrix_toeplitz_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, diagonalElement, subdiagonalElement, d, sd, ds, sds, a, as, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+ !use test_util -+ use precision_for_tests -+ implicit none -+#include "./test_precision_kinds.F90" -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, np_rows, np_cols, my_prow, my_pcol -+ MATH_DATATYPE(kind=rck) :: diagonalElement, 
subdiagonalElement -+ MATH_DATATYPE(kind=rck) :: d(:), sd(:), ds(:), sds(:) -+ MATH_DATATYPE(kind=rck) :: a(:,:), as(:,:) -+ -+ TEST_INT_TYPE :: ii -+ integer(kind=c_int) :: rowLocal, colLocal -+ -+ d(:) = diagonalElement -+ sd(:) = subdiagonalElement -+ a(:,:) = ZERO -+ -+ ! set up the diagonal and subdiagonals (for general solver test) -+ do ii=1, na ! for diagonal elements -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) ) ) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = diagonalElement -+ endif -+ enddo -+ do ii=1, na-1 -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii+1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) ) ) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ do ii=2, na -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii-1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), int(np_rows,kind=c_int), & -+ int(np_cols,kind=c_int), int(my_prow,kind=c_int), & -+ int(my_pcol,kind=c_int) ) ) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ ds = d -+ sds = sd -+ as = a -+ end subroutine -+ -+ subroutine prepare_matrix_toeplitz_mixed_complex& -+ &_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+#if COMPLEXCASE == 1 -+ & (na, diagonalElement, subdiagonalElement, d, sd, ds, sds, a, as, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+#endif -+#if REALCASE == 1 -+ & (na, diagonalElement, subdiagonalElement, d, sd, ds, sds, & -+ nblk, np_rows, np_cols, my_prow, my_pcol) -+#endif -+ !use test_util -+ implicit none -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, np_rows, np_cols, my_prow, my_pcol -+ real(kind=C_DATATYPE_KIND) :: diagonalElement, subdiagonalElement -+ -+ real(kind=C_DATATYPE_KIND) :: d(:), sd(:), ds(:), sds(:) -+ -+#if COMPLEXCASE == 1 -+ complex(kind=C_DATATYPE_KIND) :: a(:,:), as(:,:) -+#endif -+#if REALCASE == 1 -+#endif -+ -+ TEST_INT_TYPE :: ii -+ integer(kind=c_int) :: rowLocal, colLocal -+#if COMPLEXCASE == 1 -+ d(:) = diagonalElement -+ sd(:) = subdiagonalElement -+ -+ ! set up the diagonal and subdiagonals (for general solver test) -+ do ii=1, na ! 
for diagonal elements -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = diagonalElement -+ endif -+ enddo -+ do ii=1, na-1 -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii+1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ do ii=2, na -+ if (map_global_array_index_to_local_index(int(ii,kind=c_int), int(ii-1,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = subdiagonalElement -+ endif -+ enddo -+ -+ ds = d -+ sds = sd -+ as = a -+#endif -+ end subroutine -+ -+ subroutine prepare_matrix_frank_& -+ &MATH_DATATYPE& -+ &_& -+ &PRECISION& -+ & (na, a, z, as, nblk, np_rows, np_cols, my_prow, my_pcol) -+ !use test_util -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_TYPE, intent(in) :: na, nblk, np_rows, np_cols, my_prow, my_pcol -+ -+#if REALCASE == 1 -+ real(kind=C_DATATYPE_KIND) :: a(:,:), z(:,:), as(:,:) -+#endif -+#if COMPLEXCASE == 1 -+ complex(kind=C_DATATYPE_KIND) :: a(:,:), z(:,:), as(:,:) -+#endif -+ -+ TEST_INT_TYPE :: i, j -+ integer(kind=c_int) :: rowLocal, colLocal -+ -+ do i = 1, na -+ do j = 1, na -+ if (map_global_array_index_to_local_index(int(i,kind=c_int), int(j,kind=c_int), rowLocal, & -+ colLocal, int(nblk,kind=c_int), & -+ int(np_rows,kind=c_int), int(np_cols,kind=c_int), & -+ int(my_prow,kind=c_int), int(my_pcol,kind=c_int) )) then -+ if (j .le. i) then -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = & -+ real((na+1-i), kind=C_DATATYPE_KIND) / real(na, kind=C_DATATYPE_KIND) -+ else -+ a(int(rowLocal,kind=INT_TYPE),int(colLocal,kind=INT_TYPE)) = & -+ real((na+1-j), kind=C_DATATYPE_KIND) / real(na, kind=C_DATATYPE_KIND) -+ endif -+ endif -+ enddo -+ enddo -+ -+ z(:,:) = a(:,:) -+ as(:,:) = a(:,:) -+ -+ end subroutine -+ -+ -+! vim: syntax=fortran -diff -ruN elpa-2020.11.001/examples/shared/test_read_input_parameters.F90 elpa-2020.11.001_ok/examples/shared/test_read_input_parameters.F90 ---- elpa-2020.11.001/examples/shared/test_read_input_parameters.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_read_input_parameters.F90 2021-02-02 12:54:50.051994000 +0100 -@@ -0,0 +1,455 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! 
http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+ -+module test_read_input_parameters -+ use elpa, only : ELPA_2STAGE_COMPLEX_DEFAULT, ELPA_2STAGE_REAL_DEFAULT, elpa_int_string_to_value, & -+ elpa_int_value_to_string, ELPA_OK -+ use elpa_utilities, only : error_unit -+ use iso_c_binding -+ use test_util, only : x_ao, x_a -+ use test_output_type -+ -+ implicit none -+ -+ type input_options_t -+ TEST_INT_TYPE :: datatype -+ TEST_INT_TYPE :: na, nev, nblk -+ type(output_t) :: write_to_file -+ TEST_INT_TYPE :: this_real_kernel, this_complex_kernel -+ logical :: realKernelIsSet, complexKernelIsSet -+ TEST_INT_TYPE :: useQrIsSet, useGPUIsSet -+ logical :: doSolveTridi, do1stage, do2stage, justHelpMessage, & -+ doCholesky, doInvertTrm, doTransposeMultiply -+ end type -+ -+ interface read_input_parameters -+ module procedure read_input_parameters_general -+ module procedure read_input_parameters_traditional -+ module procedure read_input_parameters_traditional_noskip -+ end interface -+ -+ contains -+ -+ subroutine parse_arguments(command_line_argument, input_options) -+ implicit none -+ -+ type(input_options_t) :: input_options -+ character(len=128) :: command_line_argument -+ integer(kind=c_int) :: elpa_error -+ -+ if (command_line_argument == "--help") then -+ print *,"usage: elpa_tests [--help] [datatype={real|complex}] [na=number] [nev=number] " -+ print *," [nblk=size of block cyclic distribution] [--output_eigenvalues]" -+ print *," [--output_eigenvectors] [--real-kernel=name_of_kernel]" -+ print *," [--complex-kernel=name_of_kernel] [--use-gpu={0|1}]" -+ print *," [--use-qr={0,1}] [--tests={all|solve-tridi|1stage|2stage|cholesky& -+ &|invert-triangular|transpose-mulitply}]" -+ input_options%justHelpMessage=.true. -+ return -+ endif -+ -+ -+ if (command_line_argument(1:11) == "--datatype=") then -+ if (command_line_argument(12:15) == "real") then -+ input_options%datatype=1 -+ else -+ if (command_line_argument(12:18) == "complex") then -+ input_options%datatype=2 -+ else -+ print *,"datatype unknown! 
use either --datatype=real or --datatpye=complex" -+ stop 1 -+ endif -+ endif -+ endif -+ -+ if (command_line_argument(1:3) == "na=") then -+ read(command_line_argument(4:), *) input_options%na -+ endif -+ if (command_line_argument(1:4) == "nev=") then -+ read(command_line_argument(5:), *) input_options%nev -+ endif -+ if (command_line_argument(1:5) == "nblk=") then -+ read(command_line_argument(6:), *) input_options%nblk -+ endif -+ -+ if (command_line_argument(1:21) == "--output_eigenvectors") then -+ input_options%write_to_file%eigenvectors = .true. -+ endif -+ -+ if (command_line_argument(1:20) == "--output_eigenvalues") then -+ input_options%write_to_file%eigenvalues = .true. -+ endif -+ -+ if (command_line_argument(1:14) == "--real-kernel=") then -+ input_options%this_real_kernel = int(elpa_int_string_to_value("real_kernel", & -+ command_line_argument(15:), elpa_error), & -+ kind=INT_TYPE) -+ if (elpa_error /= ELPA_OK) then -+ print *, "Invalid argument for --real-kernel" -+ stop 1 -+ endif -+ print *,"Setting ELPA2 real kernel to ", elpa_int_value_to_string("real_kernel", & -+ int(input_options%this_real_kernel,kind=c_int)) -+ input_options%realKernelIsSet = .true. -+ endif -+ -+ if (command_line_argument(1:17) == "--complex-kernel=") then -+ input_options%this_complex_kernel = int(elpa_int_string_to_value("complex_kernel", & -+ command_line_argument(18:), elpa_error), kind=INT_TYPE) -+ if (elpa_error /= ELPA_OK) then -+ print *, "Invalid argument for --complex-kernel" -+ stop 1 -+ endif -+ print *,"Setting ELPA2 complex kernel to ", elpa_int_value_to_string("complex_kernel", & -+ int(input_options%this_complex_kernel,kind=c_int)) -+ input_options%complexKernelIsSet = .true. -+ endif -+ -+ if (command_line_argument(1:9) == "--use-qr=") then -+ read(command_line_argument(10:), *) input_options%useQrIsSet -+ endif -+ -+ if (command_line_argument(1:10) == "--use-gpu=") then -+ read(command_line_argument(11:), *) input_options%useGPUIsSet -+ endif -+ -+ if (command_line_argument(1:8) == "--tests=") then -+ if (command_line_argument(9:11) == "all") then -+ input_options%doSolveTridi=.true. -+ input_options%do1stage=.true. -+ input_options%do2stage=.true. -+ input_options%doCholesky=.true. -+ input_options%doInvertTrm=.true. -+ input_options%doTransposeMultiply=.true. -+ else if (command_line_argument(9:19) == "solve-tride") then -+ input_options%doSolveTridi=.true. -+ input_options%do1stage=.false. -+ input_options%do2stage=.false. -+ input_options%doCholesky=.false. -+ input_options%doInvertTrm=.false. -+ input_options%doTransposeMultiply=.false. -+ else if (command_line_argument(9:14) == "1stage") then -+ input_options%doSolveTridi=.false. -+ input_options%do1stage=.true. -+ input_options%do2stage=.false. -+ input_options%doCholesky=.false. -+ input_options%doInvertTrm=.false. -+ input_options%doTransposeMultiply=.false. -+ else if (command_line_argument(9:14) == "2stage") then -+ input_options%doSolveTridi=.false. -+ input_options%do1stage=.false. -+ input_options%do2stage=.true. -+ input_options%doCholesky=.false. -+ input_options%doInvertTrm=.false. -+ input_options%doTransposeMultiply=.false. -+ else if (command_line_argument(9:16) == "cholesky") then -+ input_options%doSolveTridi=.false. -+ input_options%do1stage=.false. -+ input_options%do2stage=.false. -+ input_options%doCholesky=.true. -+ input_options%doInvertTrm=.false. -+ input_options%doTransposeMultiply=.false. -+ else if (command_line_argument(9:25) == "invert-triangular") then -+ input_options%doSolveTridi=.false. 
-+ input_options%do1stage=.false. -+ input_options%do2stage=.false. -+ input_options%doCholesky=.false. -+ input_options%doInvertTrm=.true. -+ input_options%doTransposeMultiply=.false. -+ else if (command_line_argument(9:26) == "transpose-multiply") then -+ input_options%doSolveTridi=.false. -+ input_options%do1stage=.false. -+ input_options%do2stage=.false. -+ input_options%doCholesky=.false. -+ input_options%doInvertTrm=.false. -+ input_options%doTransposeMultiply=.true. -+ else -+ print *,"unknown test specified" -+ stop 1 -+ endif -+ endif -+ -+ end subroutine -+ -+ subroutine read_input_parameters_general(input_options) -+ use precision_for_tests -+ implicit none -+ -+ type(input_options_t) :: input_options -+ -+ ! Command line arguments -+ character(len=128) :: arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10 -+ -+ ! default parameters -+ input_options%datatype = 1 -+ input_options%na = 500 -+ input_options%nev = 150 -+ input_options%nblk = 16 -+ -+ input_options%write_to_file%eigenvectors = .false. -+ input_options%write_to_file%eigenvalues = .false. -+ -+ input_options%this_real_kernel = ELPA_2STAGE_REAL_DEFAULT -+ input_options%this_complex_kernel = ELPA_2STAGE_COMPLEX_DEFAULT -+ input_options%realKernelIsSet = .false. -+ input_options%complexKernelIsSet = .false. -+ -+ input_options%useQrIsSet = 0 -+ -+ input_options%useGPUIsSet = 0 -+ -+ input_options%do1Stage = .true. -+ input_options%do2Stage = .true. -+ input_options%doSolveTridi = .true. -+ input_options%doCholesky=.true. -+ input_options%doInvertTrm=.true. -+ input_options%doTransposeMultiply=.true. -+ input_options%justHelpMessage=.false. -+ -+ ! test na=1500 nev=50 nblk=16 --help --kernel --output_eigenvectors --output_eigenvalues -+ if (COMMAND_ARGUMENT_COUNT() .gt. 8) then -+ write(error_unit, '(a,i0,a)') "Invalid number (", COMMAND_ARGUMENT_COUNT(), ") of command line arguments!" -+ stop 1 -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() .gt. 0) then -+ -+ call get_COMMAND_ARGUMENT(1, arg1) -+ -+ call parse_arguments(arg1, input_options) -+ -+ -+ -+ if (COMMAND_ARGUMENT_COUNT() .ge. 2) then -+ ! argument 2 -+ call get_COMMAND_ARGUMENT(2, arg2) -+ -+ call parse_arguments(arg2, input_options) -+ endif -+ -+ ! argument 3 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 3) then -+ -+ call get_COMMAND_ARGUMENT(3, arg3) -+ -+ call parse_arguments(arg3, input_options) -+ endif -+ -+ ! argument 4 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 4) then -+ -+ call get_COMMAND_ARGUMENT(4, arg4) -+ -+ call parse_arguments(arg4, input_options) -+ -+ endif -+ -+ ! argument 5 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 5) then -+ -+ call get_COMMAND_ARGUMENT(5, arg5) -+ -+ call parse_arguments(arg5, input_options) -+ endif -+ -+ ! argument 6 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 6) then -+ -+ call get_COMMAND_ARGUMENT(6, arg6) -+ -+ call parse_arguments(arg6, input_options) -+ endif -+ -+ ! argument 7 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 7) then -+ -+ call get_COMMAND_ARGUMENT(7, arg7) -+ -+ call parse_arguments(arg7, input_options) -+ -+ endif -+ -+ ! argument 8 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 8) then -+ -+ call get_COMMAND_ARGUMENT(8, arg8) -+ -+ call parse_arguments(arg8, input_options) -+ -+ endif -+ -+ ! argument 9 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 9) then -+ -+ call get_COMMAND_ARGUMENT(9, arg9) -+ -+ call parse_arguments(arg8, input_options) -+ -+ endif -+ -+ ! argument 10 -+ if (COMMAND_ARGUMENT_COUNT() .ge. 
10) then -+ -+ call get_COMMAND_ARGUMENT(10, arg10) -+ -+ call parse_arguments(arg8, input_options) -+ -+ endif -+ -+ endif -+ -+ if (input_options%useQrIsSet .eq. 1 .and. input_options%datatype .eq. 2) then -+ print *,"You cannot use QR-decomposition in complex case" -+ stop 1 -+ endif -+ -+ end subroutine -+ -+ subroutine read_input_parameters_traditional_noskip(na, nev, nblk, write_to_file) -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_TYPE, intent(out) :: na, nev, nblk -+ -+ type(output_t), intent(out) :: write_to_file -+ logical :: skip_check_correctness -+ -+ call read_input_parameters_traditional(na, nev, nblk, write_to_file, skip_check_correctness) -+ end subroutine -+ -+ subroutine read_input_parameters_traditional(na, nev, nblk, write_to_file, skip_check_correctness) -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_TYPE, intent(out) :: na, nev, nblk -+ -+ type(output_t), intent(out) :: write_to_file -+ logical, intent(out) :: skip_check_correctness -+ -+ ! Command line arguments -+ character(len=128) :: arg1, arg2, arg3, arg4, arg5 -+ -+ ! default parameters -+ na = 5000 -+ nev = 150 -+ nblk = 16 -+ write_to_file%eigenvectors = .false. -+ write_to_file%eigenvalues = .false. -+ skip_check_correctness = .false. -+ -+ if (.not. any(COMMAND_ARGUMENT_COUNT() == [0, 3, 4, 5])) then -+ write(error_unit, '(a,i0,a)') "Invalid number (", COMMAND_ARGUMENT_COUNT(), ") of command line arguments!" -+ write(error_unit, *) "Expected: program [ [matrix_size num_eigenvalues block_size] & -+ ""output_eigenvalues"" ""output_eigenvectors""]" -+ stop 1 -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() == 3) then -+ call GET_COMMAND_ARGUMENT(1, arg1) -+ call GET_COMMAND_ARGUMENT(2, arg2) -+ call GET_COMMAND_ARGUMENT(3, arg3) -+ -+ read(arg1, *) na -+ read(arg2, *) nev -+ read(arg3, *) nblk -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() == 4) then -+ call GET_COMMAND_ARGUMENT(1, arg1) -+ call GET_COMMAND_ARGUMENT(2, arg2) -+ call GET_COMMAND_ARGUMENT(3, arg3) -+ call GET_COMMAND_ARGUMENT(4, arg4) -+ read(arg1, *) na -+ read(arg2, *) nev -+ read(arg3, *) nblk -+ -+ if (arg4 .eq. "output_eigenvalues") then -+ write_to_file%eigenvalues = .true. -+ elseif (arg4 .eq. "skip_check_correctness") then -+ skip_check_correctness = .true. -+ else -+ write(error_unit, *) & -+ "Invalid value for parameter 4. Must be ""output_eigenvalues"", ""skip_check_correctness"" or omitted" -+ stop 1 -+ endif -+ -+ endif -+ -+ if (COMMAND_ARGUMENT_COUNT() == 5) then -+ call GET_COMMAND_ARGUMENT(1, arg1) -+ call GET_COMMAND_ARGUMENT(2, arg2) -+ call GET_COMMAND_ARGUMENT(3, arg3) -+ call GET_COMMAND_ARGUMENT(4, arg4) -+ call GET_COMMAND_ARGUMENT(5, arg5) -+ read(arg1, *) na -+ read(arg2, *) nev -+ read(arg3, *) nblk -+ -+ if (arg4 .eq. "output_eigenvalues") then -+ write_to_file%eigenvalues = .true. -+ else -+ write(error_unit, *) "Invalid value for output flag! Must be ""output_eigenvalues"" or omitted" -+ stop 1 -+ endif -+ -+ if (arg5 .eq. "output_eigenvectors") then -+ write_to_file%eigenvectors = .true. -+ else -+ write(error_unit, *) "Invalid value for output flag! Must be ""output_eigenvectors"" or omitted" -+ stop 1 -+ endif -+ -+ endif -+ end subroutine -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_redir.c elpa-2020.11.001_ok/examples/shared/test_redir.c ---- elpa-2020.11.001/examples/shared/test_redir.c 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_redir.c 2021-02-02 12:54:50.055869000 +0100 -@@ -0,0 +1,125 @@ -+// This file is part of ELPA. 
-+// -+// The ELPA library was originally created by the ELPA consortium, -+// consisting of the following organizations: -+// -+// - Max Planck Computing and Data Facility (MPCDF), formerly known as -+// Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+// - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+// Informatik, -+// - Technische Universität München, Lehrstuhl für Informatik mit -+// Schwerpunkt Wissenschaftliches Rechnen , -+// - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+// - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+// Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+// and -+// - IBM Deutschland GmbH -+// -+// -+// More information can be found here: -+// http://elpa.mpcdf.mpg.de/ -+// -+// ELPA is free software: you can redistribute it and/or modify -+// it under the terms of the version 3 of the license of the -+// GNU Lesser General Public License as published by the Free -+// Software Foundation. -+// -+// ELPA is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU Lesser General Public License for more details. -+// -+// You should have received a copy of the GNU Lesser General Public License -+// along with ELPA. If not, see <http://www.gnu.org/licenses/> -+// -+// ELPA reflects a substantial effort on the part of the original -+// ELPA consortium, and we ask you to respect the spirit of the -+// license that we chose: i.e., please contribute any changes you -+// may have back to the original ELPA library distribution, and keep -+// any derivatives of ELPA under the same license that we chose for -+// the original distribution, the GNU Lesser General Public License. 
-+// -+// -+// -------------------------------------------------------------------------------------------------- -+#include <stdio.h> -+#include <fcntl.h> -+#include <stdlib.h> -+#include <unistd.h> -+#include <sys/stat.h> -+#include <sys/types.h> -+#include <unistd.h> -+#include <errno.h> -+ -+#define NAME_LENGTH 4096 -+#define FILENAME "./mpi_stdout/std%3s_rank%04d.txt" -+ -+FILE *tout, *terr; -+void dup_filename(char *filename, int dupfd); -+void dup_fd(int fd, int dupfd); -+ -+int _mkdirifnotexists(const char *dir) { -+ struct stat s; -+ if (stat(dir, &s) != 0) { -+ if (errno == ENOENT) { -+ if (mkdir(dir, 0755) != 0) { -+ perror("mkdir"); -+ return 0; -+ } else { -+ return 1; -+ } -+ } else { -+ perror("stat()"); -+ return 0; -+ } -+ } else if (!S_ISDIR(s.st_mode)) { -+ fprintf(stderr, "\"%s\" does exist and is not a directory\n", dir); -+ return 0; -+ } else { -+ return 1; -+ } -+} -+ -+int create_directories(void) { -+ if (!_mkdirifnotexists("mpi_stdout")) return 0; -+ return 1; -+} -+ -+void redirect_stdout(int *myproc) { -+ char buf[NAME_LENGTH]; -+ -+ if (*myproc == 0) { -+ snprintf(buf, NAME_LENGTH, "tee " FILENAME, "out", *myproc); -+ tout = popen(buf, "w"); -+ dup_fd(fileno(tout), 1); -+ -+ snprintf(buf, NAME_LENGTH, "tee " FILENAME, "err", *myproc); -+ terr = popen(buf, "w"); -+ dup_fd(fileno(terr), 2); -+ } else { -+ snprintf(buf, NAME_LENGTH, FILENAME, "out", *myproc); -+ dup_filename(buf, 1); -+ -+ snprintf(buf, NAME_LENGTH, FILENAME, "err", *myproc); -+ dup_filename(buf, 2); -+ } -+ -+ return; -+} -+ -+/* Redirect file descriptor dupfd to file filename */ -+void dup_filename(char *filename, int dupfd) { -+ int fd = open(filename, O_WRONLY | O_CREAT | O_TRUNC, 0644); -+ if(fd < 0) { -+ perror("open()"); -+ exit(1); -+ } -+ dup_fd(fd, dupfd); -+} -+ -+/* Redirect file descriptor dupfd to file descriptor fd */ -+void dup_fd(int fd, int dupfd) { -+ if(dup2(fd,dupfd) < 0) { -+ perror("dup2()"); -+ exit(1); -+ } -+} -diff -ruN elpa-2020.11.001/examples/shared/test_redirect.F90 elpa-2020.11.001_ok/examples/shared/test_redirect.F90 ---- elpa-2020.11.001/examples/shared/test_redirect.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_redirect.F90 2021-02-02 12:54:50.047904000 +0100 -@@ -0,0 +1,116 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! 
You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+module test_redirect -+ use, intrinsic :: iso_c_binding -+ -+ implicit none -+ public -+ -+ logical :: use_redirect_stdout -+ -+ interface -+ integer(kind=C_INT) function create_directories_c() bind(C, name="create_directories") -+ use, intrinsic :: iso_c_binding -+ implicit none -+ end function -+ end interface -+ -+ interface -+ subroutine redirect_stdout_c(myproc) bind(C, name="redirect_stdout") -+ use, intrinsic :: iso_c_binding -+ implicit none -+ integer(kind=C_INT), intent(in) :: myproc -+ end subroutine -+ end interface -+ -+ contains -+!> -+!> This function is the Fortran driver for the -+!> C program to create the redirect output -+!> directory -+!> -+!> \param none -+!> \result res integer indicates success or failure -+ function create_directories() result(res) -+ implicit none -+ integer(kind=C_INT) :: res -+ res = int(create_directories_c()) -+ end function -+!> -+!> This subroutine is the Fortran driver for the -+!> redirection of stdout and stderr of each MPI -+!> task -+!> -+!> \param myproc MPI task id -+ subroutine redirect_stdout(myproc) -+ use, intrinsic :: iso_c_binding -+ implicit none -+ integer(kind=C_INT), intent(in) :: myproc -+ call redirect_stdout_c(int(myproc, kind=C_INT)) -+ end subroutine -+!> -+!> This function checks, whether the environment variable -+!> "REDIRECT_ELPA_TEST_OUTPUT" is set to "true". -+!> Returns ".true." if variable is set, otherwise ".false." -+!> This function only works if the during the build process -+!> "HAVE_ENVIRONMENT_CHECKING" was tested successfully -+!> -+!> \param none -+!> \return logical -+ function check_redirect_environment_variable() result(redirect) -+ implicit none -+ logical :: redirect -+ character(len=255) :: REDIRECT_VARIABLE -+ -+ redirect = .false. -+ -+#if defined(HAVE_ENVIRONMENT_CHECKING) -+ call get_environment_variable("REDIRECT_ELPA_TEST_OUTPUT",REDIRECT_VARIABLE) -+#endif -+ if (trim(REDIRECT_VARIABLE) .eq. "true") redirect = .true. -+ -+ end function -+ -+end module test_redirect -diff -ruN elpa-2020.11.001/examples/shared/test_scalapack.F90 elpa-2020.11.001_ok/examples/shared/test_scalapack.F90 ---- elpa-2020.11.001/examples/shared/test_scalapack.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_scalapack.F90 2021-02-02 12:54:50.057113000 +0100 -@@ -0,0 +1,111 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! 
- Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+ -+#include "../Fortran/assert.h" -+#include "config-f90.h" -+ -+module test_scalapack -+ use test_util -+ -+ interface solve_scalapack_all -+ module procedure solve_pdsyevd -+ module procedure solve_pzheevd -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure solve_pssyevd -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure solve_pcheevd -+#endif -+ end interface -+ -+ interface solve_scalapack_part -+ module procedure solve_pdsyevr -+ module procedure solve_pzheevr -+#ifdef WANT_SINGLE_PRECISION_REAL -+ module procedure solve_pssyevr -+#endif -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ module procedure solve_pcheevr -+#endif -+ end interface -+ -+contains -+ -+#define COMPLEXCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef DOUBLE_PRECISION -+#undef COMPLEXCASE -+ -+#ifdef WANT_SINGLE_PRECISION_COMPLEX -+ -+#define COMPLEXCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef SINGLE_PRECISION -+#undef COMPLEXCASE -+ -+#endif /* WANT_SINGLE_PRECISION_COMPLEX */ -+ -+#define REALCASE 1 -+#define DOUBLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef DOUBLE_PRECISION -+#undef REALCASE -+ -+#ifdef WANT_SINGLE_PRECISION_REAL -+ -+#define REALCASE 1 -+#define SINGLE_PRECISION 1 -+#include "../../src/general/precision_macros.h" -+#include "test_scalapack_template.F90" -+#undef SINGLE_PRECISION -+#undef REALCASE -+ -+#endif /* WANT_SINGLE_PRECISION_REAL */ -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/test_scalapack_template.F90 elpa-2020.11.001_ok/examples/shared/test_scalapack_template.F90 ---- elpa-2020.11.001/examples/shared/test_scalapack_template.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_scalapack_template.F90 2021-02-02 12:54:50.052650000 +0100 -@@ -0,0 +1,161 @@ -+! (c) Copyright Pavel Kus, 2017, MPCDF -+! -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! 
Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+ -+ ! compute all eigenvectors -+ subroutine solve_p& -+ &BLAS_CHAR_AND_SY_OR_HE& -+ &evd(na, a, sc_desc, ev, z) -+ implicit none -+#include "./test_precision_kinds.F90" -+ integer(kind=ik), intent(in) :: na -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:) -+ real(kind=rk), intent(inout) :: ev(:) -+ integer(kind=ik), intent(in) :: sc_desc(:) -+ integer(kind=ik) :: info, lwork, liwork, lrwork -+ MATH_DATATYPE(kind=rck), allocatable :: work(:) -+ real(kind=rk), allocatable :: rwork(:) -+ integer, allocatable :: iwork(:) -+ -+ allocate(work(1), iwork(1), rwork(1)) -+ -+ ! query for required workspace -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, -1, iwork, -1, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, -1, rwork, -1, iwork, -1, info) -+#endif -+ ! write(*,*) "computed sizes", lwork, liwork, "required sizes ", work(1), iwork(1) -+ lwork = work(1) -+ liwork = iwork(1) -+ deallocate(work, iwork) -+ allocate(work(lwork), stat = info) -+ allocate(iwork(liwork), stat = info) -+#ifdef COMPLEXCASE -+ lrwork = rwork(1) -+ deallocate(rwork) -+ allocate(rwork(lrwork), stat = info) -+#endif -+ ! the actuall call to the method -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, lwork, iwork, liwork, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevd('V', 'U', na, a, 1, 1, sc_desc, ev, z, 1, 1, sc_desc, work, lwork, rwork, lrwork, iwork, liwork, info) -+#endif -+ -+ deallocate(iwork, work, rwork) -+ end subroutine -+ -+ -+ ! 
compute part of eigenvectors -+ subroutine solve_p& -+ &BLAS_CHAR_AND_SY_OR_HE& -+ &evr(na, a, sc_desc, nev, ev, z) -+ implicit none -+#include "./test_precision_kinds.F90" -+ integer(kind=ik), intent(in) :: na, nev -+ MATH_DATATYPE(kind=rck), intent(in) :: a(:,:) -+ MATH_DATATYPE(kind=rck), intent(inout) :: z(:,:) -+ real(kind=rk), intent(inout) :: ev(:) -+ integer(kind=ik), intent(in) :: sc_desc(:) -+ integer(kind=ik) :: info, lwork, liwork, lrwork -+ MATH_DATATYPE(kind=rck), allocatable :: work(:) -+ real(kind=rk), allocatable :: rwork(:) -+ integer, allocatable :: iwork(:) -+ integer(kind=ik) :: comp_eigenval, comp_eigenvec, smallest_ev_idx, largest_ev_idx -+ -+ allocate(work(1), iwork(1), rwork(1)) -+ smallest_ev_idx = 1 -+ largest_ev_idx = nev -+ ! query for required workspace -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, -1, iwork, -1, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, -1, rwork, -1, iwork, -1, info) -+#endif -+ ! write(*,*) "computed sizes", lwork, liwork, "required sizes ", work(1), iwork(1) -+ lwork = work(1) -+ liwork = iwork(1) -+ deallocate(work, iwork) -+ allocate(work(lwork), stat = info) -+ allocate(iwork(liwork), stat = info) -+#ifdef COMPLEXCASE -+ lrwork = rwork(1) -+ deallocate(rwork) -+ allocate(rwork(lrwork), stat = info) -+#endif -+ ! the actuall call to the method -+#ifdef REALCASE -+ call p& -+ &BLAS_CHAR& -+ &syevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, lwork, iwork, liwork, info) -+#endif -+#ifdef COMPLEXCASE -+ call p& -+ &BLAS_CHAR& -+ &heevr('V', 'I', 'U', na, a, 1, 1, sc_desc, 0.0_rk, 0.0_rk, smallest_ev_idx, largest_ev_idx, & -+ comp_eigenval, comp_eigenvec, ev, z, 1, 1, sc_desc, work, lwork, rwork, lrwork, iwork, liwork, info) -+#endif -+ assert(comp_eigenval == nev) -+ assert(comp_eigenvec == nev) -+ deallocate(iwork, work, rwork) -+ end subroutine -+ -diff -ruN elpa-2020.11.001/examples/shared/test_setup_mpi.F90 elpa-2020.11.001_ok/examples/shared/test_setup_mpi.F90 ---- elpa-2020.11.001/examples/shared/test_setup_mpi.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_setup_mpi.F90 2021-02-02 12:54:50.047215000 +0100 -@@ -0,0 +1,115 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! 
GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+#include "config-f90.h" -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+#define TEST_INT_TYPE integer(kind=c_int64_t) -+#define INT_TYPE c_int64_t -+#else -+#define TEST_INT_TYPE integer(kind=c_int32_t) -+#define INT_TYPE c_int32_t -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t) -+#define INT_MPI_TYPE c_int64_t -+#else -+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t) -+#define INT_MPI_TYPE c_int32_t -+#endif -+ -+module test_setup_mpi -+ -+ contains -+ -+ subroutine setup_mpi(myid, nprocs) -+ use test_util -+ use ELPA_utilities -+ use precision_for_tests -+ implicit none -+ -+ TEST_INT_MPI_TYPE :: mpierr -+ -+ TEST_INT_TYPE, intent(out) :: myid, nprocs -+ TEST_INT_MPI_TYPE :: myidMPI, nprocsMPI -+#ifdef WITH_OPENMP_TRADITIONAL -+ TEST_INT_MPI_TYPE :: required_mpi_thread_level, & -+ provided_mpi_thread_level -+#endif -+ -+ -+#ifdef WITH_MPI -+ -+#ifndef WITH_OPENMP_TRADITIONAL -+ call mpi_init(mpierr) -+#else -+ required_mpi_thread_level = MPI_THREAD_MULTIPLE -+ -+ call mpi_init_thread(required_mpi_thread_level, & -+ provided_mpi_thread_level, mpierr) -+ -+ if (required_mpi_thread_level .ne. provided_mpi_thread_level) then -+ write(error_unit,*) "MPI ERROR: MPI_THREAD_MULTIPLE is not provided on this system" -+ write(error_unit,*) " only ", mpi_thread_level_name(provided_mpi_thread_level), " is available" -+ call MPI_FINALIZE(mpierr) -+ call exit(77) -+ endif -+#endif -+ call mpi_comm_rank(mpi_comm_world, myidMPI, mpierr) -+ call mpi_comm_size(mpi_comm_world, nprocsMPI,mpierr) -+ -+ myid = int(myidMPI,kind=BLAS_KIND) -+ nprocs = int(nprocsMPI,kind=BLAS_KIND) -+ -+ if (nprocs <= 1) then -+ print *, "The test programs must be run with more than 1 task to ensure that usage with MPI is actually tested" -+ stop 1 -+ endif -+#else -+ myid = 0 -+ nprocs = 1 -+#endif -+ -+ end subroutine -+ -+ -+end module -diff -ruN elpa-2020.11.001/examples/shared/tests_variable_definitions.F90 elpa-2020.11.001_ok/examples/shared/tests_variable_definitions.F90 ---- elpa-2020.11.001/examples/shared/tests_variable_definitions.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/tests_variable_definitions.F90 2021-02-02 12:54:50.045008000 +0100 -@@ -0,0 +1,65 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! 
Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! https://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! This file was written by A. Marek, MPC -+ -+#include "config-f90.h" -+module precision_for_tests -+ use iso_c_binding, only : C_FLOAT, C_DOUBLE, C_FLOAT_COMPLEX, C_DOUBLE_COMPLEX, C_INT32_T, C_INT64_T, C_INT -+ -+ implicit none -+ integer, parameter :: rk8 = C_DOUBLE -+ integer, parameter :: rk4 = C_FLOAT -+ integer, parameter :: ck8 = C_DOUBLE_COMPLEX -+ integer, parameter :: ck4 = C_FLOAT_COMPLEX -+ integer, parameter :: ik = C_INT32_T -+ integer, parameter :: lik = C_INT64_T -+ -+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT -+ integer, parameter :: BLAS_KIND = C_INT64_T -+#else -+ integer, parameter :: BLAS_KIND = C_INT32_T -+#endif -+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT -+ integer, parameter :: MPI_KIND = C_INT64_T -+#else -+ integer, parameter :: MPI_KIND = C_INT32_T -+#endif -+end module precision_for_tests -diff -ruN elpa-2020.11.001/examples/shared/test_util.F90 elpa-2020.11.001_ok/examples/shared/test_util.F90 ---- elpa-2020.11.001/examples/shared/test_util.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/shared/test_util.F90 2021-02-02 12:54:50.055213000 +0100 -@@ -0,0 +1,156 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! 
ELPA is distributed in the hope that it will be useful,
-+! but WITHOUT ANY WARRANTY; without even the implied warranty of
-+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+! GNU Lesser General Public License for more details.
-+!
-+! You should have received a copy of the GNU Lesser General Public License
-+! along with ELPA. If not, see <http://www.gnu.org/licenses/>
-+!
-+! ELPA reflects a substantial effort on the part of the original
-+! ELPA consortium, and we ask you to respect the spirit of the
-+! license that we chose: i.e., please contribute any changes you
-+! may have back to the original ELPA library distribution, and keep
-+! any derivatives of ELPA under the same license that we chose for
-+! the original distribution, the GNU Lesser General Public License.
-+!
-+!
-+#include "config-f90.h"
-+#undef TEST_INT_TYPE
-+#undef INT_TYPE
-+#undef TEST_INT_MPI_TYPE
-+#undef INT_MPI_TYPE
-+
-+#ifdef HAVE_64BIT_INTEGER_MATH_SUPPORT
-+#define TEST_INT_TYPE integer(kind=c_int64_t)
-+#define INT_TYPE c_int64_t
-+#else
-+#define TEST_INT_TYPE integer(kind=c_int32_t)
-+#define INT_TYPE c_int32_t
-+#endif
-+#ifdef HAVE_64BIT_INTEGER_MPI_SUPPORT
-+#define TEST_INT_MPI_TYPE integer(kind=c_int64_t)
-+#define INT_MPI_TYPE c_int64_t
-+#else
-+#define TEST_INT_MPI_TYPE integer(kind=c_int32_t)
-+#define INT_MPI_TYPE c_int32_t
-+#endif
-+
-+module test_util
-+  use iso_c_binding
-+  use precision_for_tests
-+#ifdef WITH_MPI
-+#ifdef HAVE_MPI_MODULE
-+  use mpi
-+  implicit none
-+#else
-+  implicit none
-+  include 'mpif.h'
-+#endif
-+#else
-+  TEST_INT_MPI_TYPE, parameter :: mpi_comm_world = -1
-+#endif
-+
-+  contains
-+!>
-+!> This function translates, if ELPA was built with OpenMP support,
-+!> the found level of "thread safety" from the internal number
-+!> of the MPI library into a human understandable value
-+!>
-+!> \param level thread-safety level of the MPI library
-+!> \return str human understandable value of thread safety
-+  pure function mpi_thread_level_name(level) result(str)
-+    use, intrinsic :: iso_c_binding
-+    implicit none
-+    integer(kind=c_int), intent(in) :: level
-+    character(len=21) :: str
-+#ifdef WITH_MPI
-+    select case(level)
-+      case (MPI_THREAD_SINGLE)
-+        str = "MPI_THREAD_SINGLE"
-+      case (MPI_THREAD_FUNNELED)
-+        str = "MPI_THREAD_FUNNELED"
-+      case (MPI_THREAD_SERIALIZED)
-+        str = "MPI_THREAD_SERIALIZED"
-+      case (MPI_THREAD_MULTIPLE)
-+        str = "MPI_THREAD_MULTIPLE"
-+      case default
-+        write(str,'(i0,1x,a)') level, "(Unknown level)"
-+    end select
-+#endif
-+  end function
-+
-+  function seconds() result(s)
-+    integer :: ticks, tick_rate
-+    real(kind=c_double) :: s
-+
-+    call system_clock(count=ticks, count_rate=tick_rate)
-+    s = real(ticks, kind=c_double) / tick_rate
-+  end function
-+
-+  subroutine x_a(condition, condition_string, file, line)
-+#ifdef HAVE_ISO_FORTRAN_ENV
-+    use iso_fortran_env, only : error_unit
-+#endif
-+    implicit none
-+#ifndef HAVE_ISO_FORTRAN_ENV
-+    integer, parameter :: error_unit = 0
-+#endif
-+    logical, intent(in) :: condition
-+    character(len=*), intent(in) :: condition_string
-+    character(len=*), intent(in) :: file
-+    integer, intent(in) :: line
-+
-+    if (.not. condition) then
-+      write(error_unit,'(a,i0)') "Assertion `" // condition_string // "` failed at " // file // ":", line
-+      stop 1
-+    end if
-+  end subroutine
-+
-+  subroutine x_ao(error_code, error_code_string, file, line)
-+    use elpa
-+#ifdef HAVE_ISO_FORTRAN_ENV
-+    use iso_fortran_env, only : error_unit
-+#endif
-+    implicit none
-+#ifndef HAVE_ISO_FORTRAN_ENV
-+    integer, parameter :: error_unit = 0
-+#endif
-+    integer, intent(in) :: error_code
-+    character(len=*), intent(in) :: error_code_string
-+    character(len=*), intent(in) :: file
-+    integer, intent(in) :: line
-+
-+    if (error_code /= ELPA_OK) then
-+      write(error_unit,'(a,i0)') "Assertion failed: `" // error_code_string // &
-+          " is " // elpa_strerr(error_code) // "` at " // file // ":", line
-+      stop 1
-+    end if
-+  end subroutine
-+end module
-+
-diff -ruN elpa-2020.11.001/examples/test_real_e1.F90 elpa-2020.11.001_ok/examples/test_real_e1.F90
---- elpa-2020.11.001/examples/test_real_e1.F90 1970-01-01 01:00:00.000000000 +0100
-+++ elpa-2020.11.001_ok/examples/test_real_e1.F90 2021-02-18 14:16:15.389420020 +0100
-@@ -0,0 +1,255 @@
-+! This file is part of ELPA.
-+!
-+! The ELPA library was originally created by the ELPA consortium,
-+! consisting of the following organizations:
-+!
-+! - Max Planck Computing and Data Facility (MPCDF), formerly known as
-+!   Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG),
-+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte
-+!   Informatik,
-+! - Technische Universität München, Lehrstuhl für Informatik mit
-+!   Schwerpunkt Wissenschaftliches Rechnen ,
-+! - Fritz-Haber-Institut, Berlin, Abt. Theorie,
-+! - Max-Planck-Institut für Mathematik in den Naturwissenschaften,
-+!   Leipzig, Abt. Komplexe Strukturen in Biologie und Kognition,
-+!   and
-+! - IBM Deutschland GmbH
-+!
-+!
-+! More information can be found here:
-+! http://elpa.mpcdf.mpg.de/
-+!
-+! ELPA is free software: you can redistribute it and/or modify
-+! it under the terms of the version 3 of the license of the
-+! GNU Lesser General Public License as published by the Free
-+! Software Foundation.
-+!
-+! ELPA is distributed in the hope that it will be useful,
-+! but WITHOUT ANY WARRANTY; without even the implied warranty of
-+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-+! GNU Lesser General Public License for more details.
-+!
-+! You should have received a copy of the GNU Lesser General Public License
-+! along with ELPA. If not, see <http://www.gnu.org/licenses/>
-+!
-+! ELPA reflects a substantial effort on the part of the original
-+! ELPA consortium, and we ask you to respect the spirit of the
-+! license that we chose: i.e., please contribute any changes you
-+! may have back to the original ELPA library distribution, and keep
-+! any derivatives of ELPA under the same license that we chose for
-+! the original distribution, the GNU Lesser General Public License.
-+!
-+!
-+!>
-+!> Fortran test program to demonstrate the use of
-+!> ELPA 1 real case library.
-+!> If "HAVE_REDIRECT" was defined at build time
-+!> the stdout and stderr output of each MPI task
-+!> can be redirected to files if the environment
-+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set
-+!> to "true".
-+!>
-+!> By calling executable [arg1] [arg2] [arg3] [arg4]
-+!> one can define the size (arg1), the number of
-+!> Eigenvectors to compute (arg2), and the blocking (arg3).
-+!> If these values are not set default values (4000, 1500, 16)
-+!> are chosen.
-+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+program test_real_example -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ -+ use iso_c_binding -+ -+ use elpa -+#ifdef WITH_OPENMP -+ use omp_lib -+#endif -+ -+ -+#ifdef HAVE_MPI_MODULE -+ use mpi -+ implicit none -+#else -+ implicit none -+ include 'mpif.h' -+#endif -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ integer :: nblk -+ integer :: na, nev -+ -+ integer :: np_rows, np_cols, na_rows, na_cols -+ -+ integer :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ integer :: i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ integer, external :: numroc -+ -+ real(kind=c_double), allocatable :: a(:,:), z(:,:), ev(:) -+ -+ integer :: iseed(4096) ! Random seed, size should be sufficient for every generator -+ -+ integer :: STATUS -+ integer :: success -+ character(len=8) :: task_suffix -+ integer :: j -+ -+ integer, parameter :: error_units = 0 -+ -+#ifdef WITH_OPENMP -+ integer n_threads -+#endif -+ class(elpa_t), pointer :: e -+ !------------------------------------------------------------------------------- -+ -+ -+ ! default parameters -+ na = 1000 -+ nev = 500 -+ nblk = 16 -+ -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world,myid,mpierr) -+ call mpi_comm_size(mpi_comm_world,nprocs,mpierr) -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ ! initialise BLACS -+ my_blacs_ctxt = mpi_comm_world -+ call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+#ifdef WITH_OPENMP -+ n_threads=omp_get_max_threads() -+#endif -+ -+ -+ na_rows = numroc(na, nblk, my_prow, 0, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0, np_cols) -+ -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0, 0, my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_units,*) 'Error in BLACS descinit! 
info=',info -+ write(error_units,*) 'Most likely this happend since you want to use' -+ write(error_units,*) 'more MPI tasks than are possible for your' -+ write(error_units,*) 'problem size (matrix size and blocksize)!' -+ write(error_units,*) 'The blacsgrid can not be set up properly' -+ write(error_units,*) 'Try reducing the number of MPI tasks...' -+ call MPI_ABORT(mpi_comm_world, 1, mpierr) -+ endif -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ iseed(:) = myid -+ call RANDOM_SEED(put=iseed) -+ call RANDOM_NUMBER(z) -+ -+ a(:,:) = z(:,:) -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ call pdtran(na, na, 1.d0, z, 1, 1, sc_desc, 1.d0, a, 1, 1, sc_desc) ! A = A + Z**T -+ -+ !------------------------------------------------------------------------------- -+ -+ if (elpa_init(20171201) /= elpa_ok) then -+ print *, "ELPA API version not supported" -+ stop -+ endif -+ e => elpa_allocate() -+ -+ ! set parameters decribing the matrix and it's MPI distribution -+ call e%set("na", na, success) -+ call e%set("nev", nev, success) -+ call e%set("local_nrows", na_rows, success) -+ call e%set("local_ncols", na_cols, success) -+ call e%set("nblk", nblk, success) -+ call e%set("mpi_comm_parent", mpi_comm_world, success) -+ call e%set("process_row", my_prow, success) -+ call e%set("process_col", my_pcol, success) -+ -+#ifdef CUDA -+ call e%set("gpu", 1, success) -+#endif -+#ifdef WITH_OPENMP -+ call e%set("omp_threads", n_threads, success) -+#endif -+ success = e%setup() -+ -+ call e%set("solver", elpa_solver_1stage, success) -+ -+ -+ ! Calculate eigenvalues/eigenvectors -+ -+ if (myid==0) then -+ print '(a)','| Entering one-step ELPA solver ... ' -+ print * -+ end if -+ -+ call mpi_barrier(mpi_comm_world, mpierr) ! for correct timings only -+ call e%eigenvectors(a, ev, z, success) -+ -+ if (myid==0) then -+ print '(a)','| One-step ELPA solver complete.' -+ print * -+ end if -+ -+ call elpa_deallocate(e) -+ call elpa_uninit() -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+end -+ -diff -ruN elpa-2020.11.001/examples/test_real_e2.F90 elpa-2020.11.001_ok/examples/test_real_e2.F90 ---- elpa-2020.11.001/examples/test_real_e2.F90 1970-01-01 01:00:00.000000000 +0100 -+++ elpa-2020.11.001_ok/examples/test_real_e2.F90 2021-02-18 14:07:41.304404521 +0100 -@@ -0,0 +1,262 @@ -+! This file is part of ELPA. -+! -+! The ELPA library was originally created by the ELPA consortium, -+! consisting of the following organizations: -+! -+! - Max Planck Computing and Data Facility (MPCDF), formerly known as -+! Rechenzentrum Garching der Max-Planck-Gesellschaft (RZG), -+! - Bergische Universität Wuppertal, Lehrstuhl für angewandte -+! Informatik, -+! - Technische Universität München, Lehrstuhl für Informatik mit -+! Schwerpunkt Wissenschaftliches Rechnen , -+! - Fritz-Haber-Institut, Berlin, Abt. Theorie, -+! - Max-Plack-Institut für Mathematik in den Naturwissenschaften, -+! Leipzig, Abt. Komplexe Strukutren in Biologie und Kognition, -+! and -+! - IBM Deutschland GmbH -+! -+! -+! More information can be found here: -+! http://elpa.mpcdf.mpg.de/ -+! -+! ELPA is free software: you can redistribute it and/or modify -+! it under the terms of the version 3 of the license of the -+! 
GNU Lesser General Public License as published by the Free -+! Software Foundation. -+! -+! ELPA is distributed in the hope that it will be useful, -+! but WITHOUT ANY WARRANTY; without even the implied warranty of -+! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+! GNU Lesser General Public License for more details. -+! -+! You should have received a copy of the GNU Lesser General Public License -+! along with ELPA. If not, see <http://www.gnu.org/licenses/> -+! -+! ELPA reflects a substantial effort on the part of the original -+! ELPA consortium, and we ask you to respect the spirit of the -+! license that we chose: i.e., please contribute any changes you -+! may have back to the original ELPA library distribution, and keep -+! any derivatives of ELPA under the same license that we chose for -+! the original distribution, the GNU Lesser General Public License. -+! -+! -+!> -+!> Fortran test programm to demonstrates the use of -+!> ELPA 2 real case library. -+!> If "HAVE_REDIRECT" was defined at build time -+!> the stdout and stderr output of each MPI task -+!> can be redirected to files if the environment -+!> variable "REDIRECT_ELPA_TEST_OUTPUT" is set -+!> to "true". -+!> -+!> By calling executable [arg1] [arg2] [arg3] [arg4] -+!> one can define the size (arg1), the number of -+!> Eigenvectors to compute (arg2), and the blocking (arg3). -+!> If these values are not set default values (4000, 1500, 16) -+!> are choosen. -+!> If these values are set the 4th argument can be -+!> "output", which specifies that the EV's are written to -+!> an ascii file. -+!> -+program test_real_example -+ -+!------------------------------------------------------------------------------- -+! Standard eigenvalue problem - REAL version -+! -+! This program demonstrates the use of the ELPA module -+! together with standard scalapack routines -+! -+! Copyright of the original code rests with the authors inside the ELPA -+! consortium. The copyright of any additional modifications shall rest -+! with their original authors, but shall adhere to the licensing terms -+! distributed along with the original code in the file "COPYING". -+! -+!------------------------------------------------------------------------------- -+ -+ use iso_c_binding -+ -+ use elpa -+ -+#ifdef HAVE_MPI_MODULE -+ use mpi -+ implicit none -+#else -+ implicit none -+ include 'mpif.h' -+#endif -+ -+ !------------------------------------------------------------------------------- -+ ! Please set system size parameters below! -+ ! na: System size -+ ! nev: Number of eigenvectors to be calculated -+ ! nblk: Blocking factor in block cyclic distribution -+ !------------------------------------------------------------------------------- -+ -+ integer :: nblk -+ integer :: na, nev -+ -+ integer :: np_rows, np_cols, na_rows, na_cols -+ -+ integer :: myid, nprocs, my_prow, my_pcol, mpi_comm_rows, mpi_comm_cols -+ integer :: i, mpierr, my_blacs_ctxt, sc_desc(9), info, nprow, npcol -+ -+ integer, external :: numroc -+ -+ real(kind=c_double), allocatable :: a(:,:), z(:,:), ev(:) -+ -+ integer :: iseed(4096) ! Random seed, size should be sufficient for every generator -+ -+ integer :: STATUS -+ integer :: success -+ character(len=8) :: task_suffix -+ integer :: j -+ -+ integer, parameter :: error_units = 0 -+ -+ class(elpa_t), pointer :: e -+ !------------------------------------------------------------------------------- -+ -+ -+ ! 
default parameters -+ na = 1000 -+ nev = 500 -+ nblk = 16 -+ -+ call mpi_init(mpierr) -+ call mpi_comm_rank(mpi_comm_world,myid,mpierr) -+ call mpi_comm_size(mpi_comm_world,nprocs,mpierr) -+ -+ do np_cols = NINT(SQRT(REAL(nprocs))),2,-1 -+ if(mod(nprocs,np_cols) == 0 ) exit -+ enddo -+ ! at the end of the above loop, nprocs is always divisible by np_cols -+ -+ np_rows = nprocs/np_cols -+ -+ ! initialise BLACS -+ my_blacs_ctxt = mpi_comm_world -+ call BLACS_Gridinit(my_blacs_ctxt, 'C', np_rows, np_cols) -+ call BLACS_Gridinfo(my_blacs_ctxt, nprow, npcol, my_prow, my_pcol) -+ -+ if (myid==0) then -+ print '(a)','| Past BLACS_Gridinfo.' -+ end if -+ ! determine the neccessary size of the distributed matrices, -+ ! we use the scalapack tools routine NUMROC -+ -+ na_rows = numroc(na, nblk, my_prow, 0, np_rows) -+ na_cols = numroc(na, nblk, my_pcol, 0, np_cols) -+ -+ -+ ! set up the scalapack descriptor for the checks below -+ ! For ELPA the following restrictions hold: -+ ! - block sizes in both directions must be identical (args 4 a. 5) -+ ! - first row and column of the distributed matrix must be on -+ ! row/col 0/0 (arg 6 and 7) -+ -+ call descinit(sc_desc, na, na, nblk, nblk, 0, 0, my_blacs_ctxt, na_rows, info) -+ -+ if (info .ne. 0) then -+ write(error_units,*) 'Error in BLACS descinit! info=',info -+ write(error_units,*) 'Most likely this happend since you want to use' -+ write(error_units,*) 'more MPI tasks than are possible for your' -+ write(error_units,*) 'problem size (matrix size and blocksize)!' -+ write(error_units,*) 'The blacsgrid can not be set up properly' -+ write(error_units,*) 'Try reducing the number of MPI tasks...' -+ call MPI_ABORT(mpi_comm_world, 1, mpierr) -+ endif -+ -+ if (myid==0) then -+ print '(a)','| Past scalapack descriptor setup.' -+ end if -+ -+ allocate(a (na_rows,na_cols)) -+ allocate(z (na_rows,na_cols)) -+ -+ allocate(ev(na)) -+ -+ ! we want different random numbers on every process -+ ! (otherwise A might get rank deficient): -+ -+ iseed(:) = myid -+ call RANDOM_SEED(put=iseed) -+ call RANDOM_NUMBER(z) -+ -+ a(:,:) = z(:,:) -+ -+ if (myid == 0) then -+ print '(a)','| Random matrix block has been set up. (only processor 0 confirms this step)' -+ endif -+ call pdtran(na, na, 1.d0, z, 1, 1, sc_desc, 1.d0, a, 1, 1, sc_desc) ! A = A + Z**T -+ -+ !------------------------------------------------------------------------------- -+ -+ if (elpa_init(20171201) /= elpa_ok) then -+ print *, "ELPA API version not supported" -+ stop -+ endif -+ e => elpa_allocate() -+ -+ ! 
set parameters decribing the matrix and it's MPI distribution -+ call e%set("na", na, success) -+ call e%set("nev", nev, success) -+ call e%set("local_nrows", na_rows, success) -+ call e%set("local_ncols", na_cols, success) -+ call e%set("nblk", nblk, success) -+ call e%set("mpi_comm_parent", mpi_comm_world, success) -+ call e%set("process_row", my_prow, success) -+ call e%set("process_col", my_pcol, success) -+#ifdef CUDA -+ call e%set("gpu", 1, success) -+#endif -+ -+ success = e%setup() -+ -+ call e%set("solver", elpa_solver_2stage, success) -+ -+#ifdef CUDAKERNEL -+ call e%set("real_kernel", ELPA_2STAGE_REAL_GPU, success) -+#endif -+#ifdef AVX512 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX512_BLOCK2,success ) -+#endif -+#ifdef AVX2_B6 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX2_BLOCK6,success ) -+#endif -+#ifdef AVX2_B4 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX2_BLOCK4,success ) -+#endif -+#ifdef AVX2_B2 -+ call e%set("real_kernel", ELPA_2STAGE_REAL_AVX2_BLOCK2,success ) -+#endif -+#ifdef GENERIC -+ call e%set("real_kernel", ELPA_2STAGE_REAL_GENERIC,success ) -+#endif -+#ifdef GENERIC_SIMPLE -+ call e%set("real_kernel", ELPA_2STAGE_REAL_GENERIC_SIMPLE,success ) -+#endif -+ -+ -+ ! Calculate eigenvalues/eigenvectors -+ -+ if (myid==0) then -+ print '(a)','| Entering two-step ELPA solver ... ' -+ print * -+ end if -+ -+ call mpi_barrier(mpi_comm_world, mpierr) ! for correct timings only -+ call e%eigenvectors(a, ev, z, success) -+ -+ if (myid==0) then -+ print '(a)','| Two-step ELPA solver complete.' -+ print * -+ end if -+ -+ call elpa_deallocate(e) -+ call elpa_uninit() -+ -+ call blacs_gridexit(my_blacs_ctxt) -+ call mpi_finalize(mpierr) -+ -+end -+ diff --git a/Overlays/juwelsbooster_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb b/Overlays/juwelsbooster_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb deleted file mode 100644 index edef15b1e9d240917a98d3aa3761e126b43eec31..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/f/FFTW/FFTW-3.3.8-gompi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gompi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because AMD zen processors do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Overlays/juwelsbooster_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb b/Overlays/juwelsbooster_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb deleted file mode 100644 index fd45dceca6a1fddc684a6100db02901943a40010..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/f/FFTW/FFTW-3.3.8-gpsmpi-2020.eb +++ /dev/null @@ -1,36 +0,0 @@ -name = 'FFTW' -version = '3.3.8' - -homepage = 
'http://www.fftw.org' -description = """FFTW is a C subroutine library for computing the discrete -Fourier transform (DFT) in one or more dimensions, of arbitrary input size, -and of both real and complex data.""" - -site_contacts = 'i.gutheil@fz-juelich.de' - -toolchain = {'name': 'gpsmpi', 'version': '2020'} -toolchainopts = {'pic': True} - -source_urls = [homepage] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['6113262f6e92c5bd474f2875fa1b01054c4ad5040f6b0da7c03c98821d9ae303'] - -# no quad precision, requires GCC v4.6 or higher -# see also -# http://www.fftw.org/doc/Extended-and-quadruple-precision-in-Fortran.html -with_quad_prec = True - -# compilation fails on AMD systems when configuring with --enable-avx-128-fma, -# because AMD zen processors do not support FMA4 instructions -use_fma4 = False - -# can't find mpirun/mpiexec and fails -# runtest = 'check' - -modextravars = { - 'FFTW_ROOT': '%(installdir)s', - 'FFTW_INCLUDE': '%(installdir)s/include', - 'FFTW_LIB': '%(installdir)s/lib', -} - -moduleclass = 'numlib' diff --git a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb b/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb deleted file mode 100644 index d1cd2d234aa3003b20e5ec3ca3b519616fc95ad0..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-iimpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iimpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb b/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb deleted file mode 100644 index 5502b43c6c03ece499621671ab45e24840c39c78..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-iompi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'iompi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb b/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb deleted file mode 100644 index 5c44a9055c120e2d15382588bbeaa6f0668fe39b..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2020.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. -""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2020'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb b/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb deleted file mode 100644 index 62645ee3f76e5b2e60b8aae2e9569a597f81c430..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/h/HDF5/HDF5-1.10.6-ipsmpi-2021.eb +++ /dev/null @@ -1,27 +0,0 @@ -name = 'HDF5' -version = '1.10.6' - -homepage = 'http://www.hdfgroup.org/HDF5/' -description = """HDF5 is a unique technology suite that makes possible the management of - extremely large and complex data collections. 
-""" - -site_contacts = 's.luehrs@fz-juelich.de' - -toolchain = {'name': 'ipsmpi', 'version': '2021'} -toolchainopts = {'optarch': True, 'pic': True, 'usempi': True} - -source_urls = [ - 'https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major)s.%(version_minor)s/hdf5-%(version)s/src' -] -sources = [SOURCELOWER_TAR_GZ] -checksums = ['5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa'] - -patches = ['skip_fortran_fp_kind_test.patch'] - -dependencies = [ - ('zlib', '1.2.11'), - ('Szip', '2.1.1'), -] - -moduleclass = 'data' diff --git a/Overlays/juwelsbooster_overlay/h/HDF5/skip_fortran_fp_kind_test.patch b/Overlays/juwelsbooster_overlay/h/HDF5/skip_fortran_fp_kind_test.patch deleted file mode 100644 index cc1b1035394e9d9c59914ee1810854ba6a75c4cd..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/h/HDF5/skip_fortran_fp_kind_test.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -Nru hdf5-1.10.6_orig/fortran/test/H5_test_buildiface.F90 hdf5-1.10.6/fortran/test/H5_test_buildiface.F90 ---- hdf5-1.10.6_orig/fortran/test/H5_test_buildiface.F90 2020-10-21 12:16:45.673723000 +0200 -+++ hdf5-1.10.6/fortran/test/H5_test_buildiface.F90 2020-10-21 13:09:22.455045487 +0200 -@@ -215,7 +215,7 @@ - WRITE(11,'(A)') ' INTEGER, OPTIONAL, INTENT( IN ) :: ulp' - WRITE(11,'(A)') ' IF ( PRESENT( ulp ) ) Rel = REAL( ABS(ulp), '//TRIM(ADJUSTL(chr2))//')' - WRITE(11,'(A)') ' Rel = 1.0_'//TRIM(ADJUSTL(chr2)) -- WRITE(11,'(A)') ' real_eq_kind_'//TRIM(ADJUSTL(chr2))//' = ABS( a - b ) < ( Rel * SPACING( MAX(ABS(a),ABS(b)) ) )' -+ WRITE(11,'(A)') ' real_eq_kind_'//TRIM(ADJUSTL(chr2))//' = 0 < 1' - WRITE(11,'(A)') ' END FUNCTION real_eq_kind_'//TRIM(ADJUSTL(chr2)) - ENDDO - diff --git a/Overlays/juwelsbooster_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb b/Overlays/juwelsbooster_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb deleted file mode 100644 index 14f9dd1ef8e328d779c3f7fe85f19bb9c1909a19..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/i/iccifort/iccifort-2020.2.254-GCC-9.3.0.eb +++ /dev/null @@ -1,64 +0,0 @@ -name = 'iccifort' -version = '2020.2.254' -versionsuffix = '-GCC-9.3.0' - -homepage = 'https://software.intel.com/en-us/intel-compilers/' -description = "Intel C, C++ & Fortran compilers" - -modloadmsg = ''' -We have observed situations where the Intel compiler does not enable vectorization and is overly -conservative when applying optimizations on AMD systems when using -xHost. Other sites report -similar issues also with -xCORE-AVX2. -Our preeliminary results indicate that -march=core-avx2 works as expected and delivers good -performance. - -Please report performance problems on our installed stack with Intel compilers to sc@fz-juelich.de -and keep it in mind when compiling your code with Intel compilers in this system. 
-''' - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16744/'] -sources = ['parallel_studio_xe_%(version_major)s_update%(version_minor)s_cluster_edition.tgz'] -patches = ['iccifort-%(version)s_no_mpi_rt_dependency.patch'] -checksums = [ - # parallel_studio_xe_2020_update2_composer_edition.tgz - '4795c44374e8988b91da20ac8f13022d7d773461def4a26ca210a8694f69f133', - # iccifort-2020.2.254_no_mpi_rt_dependency.patch - '73e582d9e108d0680c19c14e9a9c49dbbb06829e39ba8ed87bfd6b4222231196', -] - -local_gccver = '9.3.0' - -dependencies = [ - ('GCCcore', local_gccver), - ('binutils', '2.34', '', ('GCCcore', local_gccver)), -] - -# list of regex for components to install -# full list of components can be obtained from pset/mediaconfig.xml in unpacked sources -# cfr. https://software.intel.com/en-us/articles/intel-composer-xe-2015-silent-installation-guide -components = [ - 'intel-comp', 'intel-ccomp', 'intel-fcomp', 'intel-icc', 'intel-ifort', - 'intel-openmp', 'intel-ipsc?_', 'intel-gdb(?!.*mic)' -] - -dontcreateinstalldir = True - -# disable data collection -modextravars = { - 'INTEL_DISABLE_ISIP': '1' -} - -# We have a custom naming scheme that allows us to use a more descriptive module name -modaltsoftname = 'Intel' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = 'family("compiler")' - -# Always do a recursive unload on compilers -recursive_module_unload = True - -moduleclass = 'compiler' diff --git a/Overlays/juwelsbooster_overlay/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb b/Overlays/juwelsbooster_overlay/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb deleted file mode 100644 index 35a3b40805f3d2068ff0cc8d4ec8bedb40544b0f..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/l/LAMMPS/LAMMPS-24Dec2020-gpsmkl-2020-CUDA.eb +++ /dev/null @@ -1,168 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '24Dec2020' -versionsuffix = '-CUDA' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'patch_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -cuda_compute_capabilities = ['8.0'] - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options - -# Use the bfd linker for C++ (this will only be picked up when using Kokkos) -preconfigopts = 'export CXXFLAGS="-fuse-ld=bfd $CXXFLAGS" &&' -# docs require virtualenv (which we don't have) -configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off ' - -# auto-enabled by easyblock -# 'GPU' - if cuda package is present and kokkos is disabled -# 'KOKKOS' - if kokkos is enabled (by default) -# -# not enabled (yet), needs more work/additional dependencies: -# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package -# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package -general_packages = [ - 'ASPHERE', - 'BODY', - 'CLASS2', - 'COLLOID', - 'COMPRESS', - 'CORESHELL', - 'DIPOLE', - 'GRANULAR', - 'KIM', - 'KSPACE', - 'MANYBODY', - 'MC', - 'MESSAGE', - 'MISC', - 'MLIAP', - 'MOLECULE', - 'MPIIO', - 'PERI', - 'POEMS', - 'PYTHON', - 'QEQ', - 'REPLICA', - 'RIGID', - 'SHOCK', - 'SNAP', - 'SPIN', - 'SRD', - 'VORONOI', -] - -# run short test case to make sure installation doesn't produce blatently incorrect results; -# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b -# (requires an MPI context for intel/2020a) -# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py'] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'chem' diff --git a/Overlays/juwelsbooster_overlay/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb b/Overlays/juwelsbooster_overlay/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb deleted file mode 100644 index 7210f37bc48a792d042c384051407ef0e62e7057..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/l/LAMMPS/LAMMPS-29Oct2020-gpsmkl-2020-CUDA.eb +++ /dev/null @@ -1,168 +0,0 @@ -# Installation command used: -# eb --include-easyblocks=$PWD/Custom_EasyBlocks/lammps.py,"$EASYBUILD_INCLUDE_EASYBLOCKS"\\ -# --mpi-cmd-template='echo %(nr_ranks)s && %(cmd)s' \\ -# Golden_Repo/l/LAMMPS/LAMMPS-22Oct2020-intel-para-2020-Python-3.8.5.eb -name = 'LAMMPS' -version = '29Oct2020' -versionsuffix = '-CUDA' - -homepage = 'https://lammps.sandia.gov/' -description = """LAMMPS is a classical molecular dynamics code, and an acronym -for Large-scale Atomic/Molecular Massively Parallel Simulator. LAMMPS has -potentials for solid-state materials (metals, semiconductors) and soft matter -(biomolecules, polymers) and coarse-grained or mesoscopic systems. It can be -used to model atoms or, more generically, as a parallel particle simulator at -the atomic, meso, or continuum scale. LAMMPS runs on single processors or in -parallel using message-passing techniques and a spatial-decomposition of the -simulation domain. The code is designed to be easy to modify or extend with new -functionality. 
-""" - -site_contacts = 'a.kreuzer@fz-juelich.de' - -toolchain = {'name': 'gpsmkl', 'version': '2020'} -toolchainopts = {'openmp': True, 'cstd': 'c++11', 'usempi': True} - -# 'https://github.com/lammps/lammps/archive/' -source_urls = [GITHUB_LOWER_SOURCE] -sources = [ - 'stable_%(version)s.tar.gz', - {'extract_cmd': 'cp %s %(builddir)s', 'filename': 'lammps_vs_yaff_test_single_point_energy.py'}, -] - -builddependencies = [ - ('CMake', '3.18.0'), - ('pkg-config', '0.29.2'), - ('archspec', '0.1.0', '-Python-%(pyver)s'), -] -dependencies = [ - ('CUDA', '11.0', '', SYSTEM), - ('Python', '3.8.5'), - ('libpng', '1.6.37'), - ('libjpeg-turbo', '2.0.5'), - ('netCDF', '4.7.4'), - ('GSL', '2.6'), - ('zlib', '1.2.11'), - ('gzip', '1.10'), - ('cURL', '7.71.1'), - ('HDF5', '1.10.6'), - ('tbb', '2020.3'), - ('PCRE', '8.44'), - ('libxml2', '2.9.10'), - ('FFmpeg', '4.3.1'), - ('Voro++', '0.4.6'), - ('kim-api', '2.1.3'), - ('Eigen', '3.3.7'), - ('yaff', '1.6.0', '-Python-%(pyver)s'), - ('PLUMED', '2.6.1'), - ('ScaFaCoS', '1.0.1'), - # See below for why this is not included - # ('VTK', '8.2.0', local_python_versionsuffix), -] - -# not enabled (yet), needs more work/additional dependencies: -# ADIOS - https://lammps.sandia.gov/doc/Build_extras.html#user-adios-package -# AWPMD - https://lammps.sandia.gov/doc/Build_extras.html#user-awpmd-package -# QMMM - https://lammps.sandia.gov/doc/Packages_details.html#pkg-user-qmmm -# QUIP - https://lammps.sandia.gov/doc/Build_extras.html#user-quip-package -# VTK - support is available in the foss version but currently fails to build for intel -# due to https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/746611 -# see https://github.com/lammps/lammps/issues/1964 for details -user_packages = [ - 'ATC', - 'BOCS', - 'CGDNA', - 'CGSDK', - 'COLVARS', - 'DIFFRACTION', - 'DPD', - 'DRUDE', - 'EFF', - 'FEP', - 'H5MD', - 'LB', - 'MANIFOLD', - 'MEAMC', - 'MESODPD', - 'MESONT', - 'MGPT', - 'MISC', - 'MOFFF', - 'MOLFILE', - 'NETCDF', - 'PHONON', - 'PLUMED', - 'PTM', - 'QTB', - 'REACTION', - 'REAXC', - 'SCAFACOS', - 'SDPD', - 'SMD', - 'SMTBQ', - 'SPH', - 'TALLY', - 'UEF', - 'YAFF', -] -enhance_sanity_check = True - -cuda_compute_capabilities = ['8.0'] - -# To use additional custom configuration options, use the 'configopts' easyconfig parameter -# See docs and lammps easyblock for more information. 
-# https://github.com/lammps/lammps/blob/master/cmake/README.md#lammps-configuration-options
-
-# Use the bfd linker for C++ (this will only be picked up when using Kokkos)
-preconfigopts = 'export CXXFLAGS="-fuse-ld=bfd $CXXFLAGS" &&'
-# docs require virtualenv (which we don't have)
-configopts = ' -DBUILD_DOC=off -DPKG_USER-INTEL=off '
-
-# auto-enabled by easyblock
-# 'GPU' - if cuda package is present and kokkos is disabled
-# 'KOKKOS' - if kokkos is enabled (by default)
-#
-# not enabled (yet), needs more work/additional dependencies:
-# 'LATTE', - https://lammps.sandia.gov/doc/Build_extras.html#latte-package
-# 'MSCG', - https://lammps.sandia.gov/doc/Build_extras.html#mscg-package
-general_packages = [
-    'ASPHERE',
-    'BODY',
-    'CLASS2',
-    'COLLOID',
-    'COMPRESS',
-    'CORESHELL',
-    'DIPOLE',
-    'GRANULAR',
-    'KIM',
-    'KSPACE',
-    'MANYBODY',
-    'MC',
-    'MESSAGE',
-    'MISC',
-    'MLIAP',
-    'MOLECULE',
-    'MPIIO',
-    'PERI',
-    'POEMS',
-    'PYTHON',
-    'QEQ',
-    'REPLICA',
-    'RIGID',
-    'SHOCK',
-    'SNAP',
-    'SPIN',
-    'SRD',
-    'VORONOI',
-]
-
-# run a short test case to make sure the installation doesn't produce blatantly incorrect results;
-# this catches a problem where having the USER-INTEL package enabled causes trouble when installing with intel/2019b
-# (requires an MPI context for intel/2020a)
-# sanity_check_commands = ['cd %(builddir)s && %(mpi_cmd_prefix)s python lammps_vs_yaff_test_single_point_energy.py']
-
-modluafooter = '''
-add_property("arch","gpu")
-'''
-
-moduleclass = 'chem'
diff --git a/Overlays/juwelsbooster_overlay/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb b/Overlays/juwelsbooster_overlay/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb
deleted file mode 100644
index b9dbd75dd8d1cbd62799493f7774344ff89c6b2f..0000000000000000000000000000000000000000
--- a/Overlays/juwelsbooster_overlay/n/NVHPC/NVHPC-20.11-GCC-9.3.0.eb
+++ /dev/null
@@ -1,85 +0,0 @@
-name = 'NVHPC'
-version = '20.11'
-local_gccver = '9.3.0'
-versionsuffix = '-GCC-%s' % local_gccver
-
-homepage = 'https://developer.nvidia.com/hpc-sdk/'
-description = """C, C++ and Fortran compilers included with the NVIDIA HPC SDK (previously: PGI)"""
-site_contacts = 'a.herten@fz-juelich.de'
-
-toolchain = SYSTEM
-
-# By downloading, you accept the HPC SDK Software License Agreement (https://docs.nvidia.com/hpc-sdk/eula/index.html)
-# accept_eula = True
-source_urls = ['https://developer.download.nvidia.com/hpc-sdk/%(version)s/']
-local_tarball_tmpl = 'nvhpc_2020_%%(version_major)s%%(version_minor)s_Linux_%s_cuda_multi.tar.gz'
-sources = [local_tarball_tmpl % '%(arch)s']
-checksums = [
-    {
-        local_tarball_tmpl % 'x86_64':
-            'c80fc26e5ba586696f7030f03054c1aaca0752a891c7923faf47eb23b66857ec',
-        local_tarball_tmpl % 'ppc64le':
-            '99e5a5437e82f3914e0fe81feb761a5b599a3fe8b31f3c2cac8ae47e8cdc7b0f'
-    }
-]
-
-local_gccver = '9.3.0'
-dependencies = [
-    ('GCCcore', local_gccver),
-    ('binutils', '2.34', '', ('GCCcore', local_gccver)),
-    ('CUDA', '11.0', '', SYSTEM),
-    # This is necessary to avoid cases where just libnuma.so.1 is present in the system and -lnuma fails
-    ('numactl', '2.0.13', '', SYSTEM)
-]
-
-module_add_cuda = False
-cuda_compute_capabilities = "8.0"
-
-# specify default CUDA version that should be used by NVHPC
-# should match one of the CUDA versions that are included with this NVHPC version
-# (see install_components/Linux_x86_64/20.7/cuda/)
-# for NVHPC 20.7, those are: 11.0, 10.2, 10.1;
-# this version can be tweaked from the EasyBuild command line with
-# --try-amend=default_cuda_version="10.2" (for example)
-default_cuda_version = '11.0'
-
-# NVHPC
EasyBlock supports some features, which can be set via CLI or this easyconfig. -# The following list gives examples for the easyconfig -# -# NVHPC needs CUDA to work. Two options are available: 1) Use NVHPC-bundled CUDA, 2) use system CUDA -# 1) Bundled CUDA -# If no easybuild dependency to CUDA is present, the bundled CUDA is taken. A version needs to be specified with -# default_cuda_version = "11.0" -# in this easyconfig file; alternatively, it can be specified through the command line during installation with -# --try-amend=default_cuda_version="10.2" -# 2) CUDA provided via EasyBuild -# Use CUDAcore as a dependency, for example -# dependencies = [('CUDAcore', '11.0.2')] -# The parameter default_cuda_version still can be set as above. -# If not set, it will be deduced from the CUDA module (via $EBVERSIONCUDA) -# -# Define a NVHPC-default Compute Capability -# cuda_compute_capabilities = "8.0" -# Can also be specified on the EasyBuild command line via --cuda-compute-capabilities=8.0 -# Only single values supported, not lists of values! -# -# Options to add/remove things to/from environment module (defaults shown) -# module_byo_compilers = False # Remove compilers from PATH (Bring-your-own compilers) -# module_nvhpc_own_mpi = False # Add NVHPC's own pre-compiled OpenMPI -# module_add_math_libs = False # Add NVHPC's math libraries (which should be there from CUDA anyway) -# module_add_profilers = False # Add NVHPC's NVIDIA Profilers -# module_add_nccl = False # Add NVHPC's NCCL library -# module_add_nvshmem = False # Add NVHPC's NVSHMEM library -# module_add_cuda = False # Add NVHPC's bundled CUDA - -# this bundle serves as a compiler-only toolchain, so it should be marked as compiler (important for HMNS) -moduleclass = 'compiler' - -# We use a HMNS, so let's enforce a unique compiler -modluafooter = ''' -family("compiler") -add_property("arch","gpu") -''' - -# Always do a recursive unload on compilers -recursive_module_unload = True diff --git a/Overlays/juwelsbooster_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb b/Overlays/juwelsbooster_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb deleted file mode 100644 index 030ece8ba92f6d9c2db0bd9b7ca5aa461175d250..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/n/Nsight-Compute/Nsight-Compute-2020.2.0-GCCcore-9.3.0.eb +++ /dev/null @@ -1,59 +0,0 @@ -# jg (CSCS) -# AH (JSC) -easyblock = 'Binary' - -name = 'Nsight-Compute' -version = '2020.2.0' -homepage = 'https://developer.nvidia.com/nsight-compute' -description = 'NVIDIA Nsight Compute is an interactive kernel profiler for CUDA applications' - -site_contacts = 'Andreas Herten <a.herten@fz-juelich.de>' - -toolchain = {'name': 'GCCcore', 'version': '9.3.0'} - -# Download source binary package manually, requires Nvidia Developer Account -# source_urls = 'https://developer.nvidia.com/nsight-compute' -sources = [{ - 'filename': 'nsight-compute-linux-%(version)s.18-28964561.run', - 'extract_cmd': '/bin/sh %s' -}] -checksums = ['604769a55a72adce8f1513fcacb36d7cf5b5e3cc99b65d6a20e4d5e987344cb0'] - -# Not really necessary, but useful if we use this as a template for another package -builddependencies = [ - ('binutils', '2.34'), -] - -dependencies = [ - # ('nvidia-driver', 'default', '', SYSTEM), - ('X11', '20200222') -] - -extract_sources = True -unpack_options = '--nochown --noexec --nox11 --target %(builddir)s' - -install_cmd = 'cp -r %(builddir)s/pkg/* %(installdir)s/' - -# Workaround 1) (find) due to wrong permissions once the files 
are extracted from the .run file -# Workaround 2) (mv) due to CentOS 8 coming with a newer libk5crypto.so which doesn't have the symbols -# anymore needed by Nsight Compute's own libcrypto.so. Removing / renaming the shipped -# libcrypto.so makes Nsight Compute pick up the system libcrypto.so and everything is -# grand again. Bug has been filed by Andreas, 13.10.2020 - -local_libcrypto_path = '%(installdir)s/host/linux-desktop-glibc_2_11_3-x64/libcrypto.so' -postinstallcmds = [ - 'find %(installdir)s -type f -and -executable -and ! -name "lib*" -exec chmod go+x {} \;', - 'mv %s %s.bak' % (local_libcrypto_path, local_libcrypto_path), - 'mv %s.1.1 %s.1.1.bak' % (local_libcrypto_path, local_libcrypto_path) -] - -sanity_check_paths = { - 'files': ['ncu-ui', 'ncu'], - 'dirs': ['docs', 'extras', 'host', 'sections', 'target'] -} - -modluafooter = """ -add_property("arch","gpu") -""" - -moduleclass = 'tools' diff --git a/Overlays/juwelsbooster_overlay/n/nvidia-driver/nvidia-driver-default.eb b/Overlays/juwelsbooster_overlay/n/nvidia-driver/nvidia-driver-default.eb deleted file mode 100644 index 4606bd345167e79e6690a45a391f96a9957d48b7..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/n/nvidia-driver/nvidia-driver-default.eb +++ /dev/null @@ -1,19 +0,0 @@ -name = 'nvidia-driver' -version = 'default' -realversion = '460.32.03' - -homepage = 'https://developer.nvidia.com/cuda-toolkit' -description = """This is a set of libraries normally installed by the NVIDIA driver installer.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = ['http://us.download.nvidia.com/tesla/%s/' % realversion] -sources = ['NVIDIA-Linux-x86_64-%s.run' % realversion] - -modluafooter = ''' -add_property("arch","gpu") -''' - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb b/Overlays/juwelsbooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb deleted file mode 100644 index a1543be921a2c8fa2b2c559b6f5314f69d884245..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA-low-latency.eb +++ /dev/null @@ -1,58 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default OpenMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'UCX_MAX_RNDV_RAILS': '1', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # OMPIO does not seem to work reliably on our system - 'OMPI_MCA_io': 'romio321', -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb b/Overlays/juwelsbooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb deleted file mode 100644 index e276b02b5ce269373fd0b9443d0fb71b94cd98a5..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/o/OpenMPI-settings/OpenMPI-settings-4.1-CUDA.eb +++ /dev/null @@ -1,45 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'OpenMPI-settings' -version = '4.1' -versionsuffix = 'CUDA' - -homepage = '' -description = """This is a module to load the default OpenMPI configuration""" - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'SLURM_MPI_TYPE': 'pspmix', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'UCX_MAX_RNDV_RAILS': '1', - 'OMPI_MCA_mca_base_component_show_load_errors': '1', - 'OMPI_MCA_mpi_param_check': '1', - 'OMPI_MCA_mpi_show_handle_leaks': '1', - 'OMPI_MCA_mpi_warn_on_fork': '1', - # Disable uct for the time being due to: - # https://github.com/openucx/ucx/wiki/OpenMPI-and-OpenSHMEM-installation-with-UCX#running-open-mpi-with-ucx - # Also openib, since it is deprecated and should be substituted by the UCX support in the pml - 'OMPI_MCA_btl': '^uct,openib', - 'OMPI_MCA_btl_openib_allow_ib': '1', - 'OMPI_MCA_bml_r2_show_unreach_errors': '0', - 'OMPI_MCA_coll': '^ml', - 'OMPI_MCA_coll_hcoll_enable': '1', - 'OMPI_MCA_coll_hcoll_np': '0', - 'OMPI_MCA_pml': 'ucx', - 'OMPI_MCA_osc': '^rdma', - 'OMPI_MCA_opal_abort_print_stack': '1', - 'OMPI_MCA_opal_set_max_sys_limits': '1', - 'OMPI_MCA_opal_event_include': 'epoll', - 'OMPI_MCA_btl_openib_warn_default_gid_prefix': '0', - # OMPIO does not seem to work reliably on our system - 'OMPI_MCA_io': 'romio321', -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb b/Overlays/juwelsbooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb deleted file mode 100644 index ab139ab9b6585b6378b990ab154463b778a96764..0000000000000000000000000000000000000000 --- 
a/Overlays/juwelsbooster_overlay/o/OpenMPI/OpenMPI-4.1.0rc1-GCC-9.3.0.eb +++ /dev/null @@ -1,67 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'OpenMPI' -version = '4.1.0rc1' - -homepage = 'http://www.open-mpi.org/' -description = """The Open MPI Project is an open source MPI-3 implementation.""" - -site_contacts = 'sc@fz-juelich.de' - -toolchain = {'name': 'GCC', 'version': '9.3.0'} - -source_urls = ['http://www.open-mpi.org/software/ompi/v%(version_major_minor)s/downloads'] -sources = [SOURCELOWER_TAR_GZ] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), - # needed for --with-pmix - ('pmix-devel'), -] - -builddependencies = [ - ('Autotools', '20200321'), -] - -dependencies = [ - ('zlib', '1.2.11'), - ('hwloc', '2.2.0'), - ('UCX', '1.9.0', '', SYSTEM), - ('CUDA', '11.0', '', SYSTEM), - ('libevent', '2.1.12'), -] - -configopts = '--enable-shared ' -configopts += '--with-hwloc=$EBROOTHWLOC ' # hwloc support -configopts += '--with-ucx=$EBROOTUCX ' -configopts += '--with-verbs ' -configopts += '--with-libevent=$EBROOTLIBEVENT ' -configopts += '--without-orte ' -configopts += '--without-psm2 ' -configopts += '--disable-oshmem ' -configopts += '--with-cuda=$EBROOTCUDA ' -configopts += '--with-ime=/opt/ddn/ime ' -configopts += '--with-gpfs ' - -# to enable SLURM integration (site-specific) -configopts += '--with-slurm --with-pmix=external --with-libevent=external --with-ompi-pmix-rte' - -local_libs = ["mpi_mpifh", "mpi", "ompitrace", "open-pal", "open-rte"] -sanity_check_paths = { - 'files': ["bin/%s" % local_binfile for local_binfile in ["ompi_info", "opal_wrapper"]] + - ["lib/lib%s.%s" % (local_libfile, SHLIB_EXT) for local_libfile in local_libs] + - ["include/%s.h" % x for x in ["mpi-ext", "mpif-config", "mpif", "mpi", "mpi_portable_platform"]], - 'dirs': [], -} - -# Add a family for our naming scheme -modluafooter = ''' -family("mpi") -add_property("arch","gpu") -if not ( isloaded("mpi-settings") ) then - load("mpi-settings") -end -''' - -moduleclass = 'mpi' diff --git a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb b/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb deleted file mode 100644 index f535c63f0d4ecea43a7f82cb2198a69c0a22b1de..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/PyTorch-Geometric-1.6.3-gcccoremkl-9.3.0-2020.2.254-Python-3.8.5-PyTorch-1.7.0.eb +++ /dev/null @@ -1,94 +0,0 @@ -easyblock = 'PythonBundle' - -name = 'PyTorch-Geometric' -version = '1.6.3' -local_pytorch_ver = '1.7.0' -versionsuffix = '-Python-%%(pyver)s-PyTorch-%s' % local_pytorch_ver - -homepage = 'https://github.com/rusty1s/pytorch_geometric' -description = "PyTorch Geometric (PyG) is a geometric deep learning extension library for PyTorch." 
- -site_contacts = 't.breuer@fz-juelich.de' - -toolchain = {'name': 'gcccoremkl', 'version': '9.3.0-2020.2.254'} - -local_pysuff = '-Python-%(pyver)s' -dependencies = [ - ('Python', '3.8.5'), - ('PyTorch', local_pytorch_ver, local_pysuff), - ('numba', '0.51.1', local_pysuff), - ('h5py', '2.10.0', '-serial%s' % local_pysuff), - ('scikit', '2020', local_pysuff), - ('torchvision', '0.8.2', local_pysuff), - ('trimesh', '3.8.11', local_pysuff), - ('METIS', '5.1.0', '-IDX64'), -] - -use_pip = True - -# this is a bundle of Python packages -exts_defaultclass = 'PythonPackage' -exts_download_dep_fail = True -exts_default_options = {'source_urls': [PYPI_SOURCE]} - -exts_list = [ - ('gdist', '1.0.3', { - 'source_urls': ['https://pypi.python.org/packages/source/g/gdist'], - 'modulename': 'gdist', - }), - ('googledrivedownloader', '0.4', { - 'checksums': ['4b34c1337b2ff3bf2bd7581818efbdcaea7d50ffd484ccf80809688f5ca0e204'], - 'modulename': 'google_drive_downloader', - }), - ('plyfile', '0.7.2', { - 'checksums': ['59a25845d00a51098e6c9147c3c96ce89ad97395e256a4fabb4aed7cf7db5541'], - }), - ('torch_scatter', '2.0.5', { - 'patches': ['torch_scatter-2.0.5-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '2.0.5.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_scatter/archive/'], - }), - ('torch_sparse', '0.6.8', { - 'patches': ['torch_sparse-0.6.8-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'preinstallopts': 'FORCE_CUDA=1 WITH_METIS=1', - 'source_tmpl': '0.6.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_sparse/archive/'], - }), - ('torch_cluster', '1.5.8', { - 'patches': ['torch_cluster-1.5.8-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.5.8.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_cluster/archive/'], - }), - ('torch_spline_conv', '1.2.0', { - 'patches': ['torch_spline_conv-1.2.0-sm_80.patch'], - 'prebuildopts': 'FORCE_CUDA=1', - 'preinstallopts': 'FORCE_CUDA=1', - 'source_tmpl': '1.2.0.tar.gz', - 'source_urls': ['https://github.com/rusty1s/pytorch_spline_conv/archive'], - }), - ('ase', '3.21.0', { - 'source_urls': ['https://pypi.python.org/packages/source/a/ase'], - 'modulename': 'ase', - }), - ('python-louvain', '0.15', { - 'source_urls': ['https://pypi.python.org/packages/source/p/python-louvain'], - 'checksums': ['2a856edfbe29952a60a5538a84bb78cca18f6884a88b9325e85a11c8dd4917eb'], - 'modulename': 'community', - }), - ('tqdm', '4.56.0', { - 'source_urls': ['https://pypi.python.org/packages/source/t/tqdm'], - 'modulename': 'tqdm', - }), - ('torch_geometric', version, { - 'checksums': ['347f693bebcc8a621eda4867dafab91c04db5f596d7ed7ecb89b242f8ab5c6a1'], - }), -] - -sanity_pip_check = True - -moduleclass = 'devel' diff --git a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_80.patch b/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_80.patch deleted file mode 100644 index c80da06e0469c2c613424a4df95950c022c60895..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_cluster-1.5.8-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:10:11.609352000 +0100 -+++ setup.py 2021-01-20 10:10:37.525550350 +0100 -@@ -39,7 +39,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', 
'--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_80.patch b/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_80.patch deleted file mode 100644 index ec5521c3024f876be8c5f6999256757e7d0631ec..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_scatter-2.0.5-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 09:53:22.085271000 +0100 -+++ setup.py 2021-01-20 09:53:54.835241801 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_80.patch b/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_80.patch deleted file mode 100644 index 5439544a81cb588a07218faeb89272c07d9b2595..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_sparse-0.6.8-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:07:15.518446000 +0100 -+++ setup.py 2021-01-20 10:07:51.389877000 +0100 -@@ -53,7 +53,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - if sys.platform == 'win32': diff --git a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_80.patch b/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_80.patch deleted file mode 100644 index a3ae24b363e5bce2e2937c14301af194bd14cc14..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/PyTorch-Geometric/torch_spline_conv-1.2.0-sm_80.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- setup_orig.py 2021-01-20 10:12:33.326687000 +0100 -+++ setup.py 2021-01-20 10:12:51.492198482 +0100 -@@ -26,7 +26,7 @@ - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - nvcc_flags = [] if nvcc_flags == '' else nvcc_flags.split(' ') -- nvcc_flags += ['-arch=sm_35', '--expt-relaxed-constexpr'] -+ nvcc_flags += ['-gencode=arch=compute_80,code=sm_80', '--expt-relaxed-constexpr'] - extra_compile_args['nvcc'] = nvcc_flags - - extensions_dir = osp.join(osp.dirname(osp.abspath(__file__)), 'csrc') diff --git a/Overlays/juwelsbooster_overlay/p/pscom/pscom-5.4-default.eb b/Overlays/juwelsbooster_overlay/p/pscom/pscom-5.4-default.eb deleted file mode 100644 index 309d976fd1e0697947bee82968b946272d646123..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/pscom/pscom-5.4-default.eb +++ /dev/null @@ -1,50 +0,0 @@ -easyblock = 'CMakeMake' - -name = 'pscom' -# Create drop-in replacement version that ensures over-riding behaviour -version = "5.4-default" -local_realversion = 
"5.4.7-1" -homepage = 'http://www.par-tec.com' -description = """ParaStation is a robust and efficient cluster middleware, consisting of a high-performance -communication layer (MPI) and a sophisticated management layer. -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM - -source_urls = ['https://github.com/ParaStation/%(name)s/archive/'] -sources = ['%%(name)s-%s.tar.bz2' % local_realversion] - -builddependencies = [ - ('popt', '1.16'), - ('CUDA', '11.0'), - ('CMake', '3.18.0'), -] - -dependencies = [ - ('UCX', '1.9.0'), -] - -build_type = 'RelWithDebInfo' - -preconfigopts = 'export UCP_LDFLAGS="-L$EBROOTUCX/lib" && ' -preconfigopts += 'export CUDA_LDFLAGS="-L$EBROOTNVIDIA/lib64" &&' - -configopts = '-DCUDA_ENABLED=ON' - -sanity_check_paths = { - 'files': [ - 'include/%(name)s.h', - ('lib/libpscom.so', 'lib64/libpscom.so'), - ('lib/libpscom4ucp.so', 'lib64/libpscom4ucp.so'), - ('lib/libpscom4openib.so', 'lib64/libpscom4openib.so'), - ], - 'dirs': [], -} - -modextravars = { - 'PSCOMVERSION': '%s' % local_realversion, -} - -moduleclass = 'tools' diff --git a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb b/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb deleted file mode 100644 index 4c0c2ddc4ebb07193099d57e3daf2e0f56a49a93..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA-low-latency.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default ParaStationMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb b/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb deleted file mode 100644 index 77094b63703cd8f1d4b5d490ae82cc99f5ab3d9e..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-CUDA.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb b/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb deleted file mode 100644 index af53fdc9cd8e1bb789a315e13a27e0581e0fa18f..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA-low-latency.eb +++ /dev/null @@ -1,40 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'CUDA-low-latency' - -homepage = '' -description = '''This is a module to load the default ParaStationMPI configuration - -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -modloadmsg = ''' -This module is otherwise equivalent to mpi-settings/CUDA, but enables UCX_MEMTYPE_CACHE. 
Please read the URL below to -understand if this is something you can use: - -http://openucx.github.io/ucx/faq.html#7-what-are-the-current-limitations-of-using-gpu-memory -''' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'y', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb b/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb deleted file mode 100644 index c5b260f0bd9608cebaea54a357eccb38622b8b28..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/p/psmpi-settings/psmpi-settings-5.4-mt-CUDA.eb +++ /dev/null @@ -1,27 +0,0 @@ -easyblock = 'SystemBundle' - -name = 'psmpi-settings' -version = '5.4-mt' -versionsuffix = 'CUDA' - -homepage = '' -description = 'This is a module to load the default ParaStationMPI configuration' - -site_contacts = 'd.alvarez@fz-juelich.de' - -toolchain = SYSTEM - -source_urls = [] - -sources = [] -modextravars = { - 'PSP_CUDA': '1', - 'PSP_SHM': '0', - 'PSP_UCP': '1', - 'PSP_HARD_ABORT': '1', - 'UCX_TLS': 'rc_x,cuda_ipc,gdr_copy,self,sm,cuda_copy', - 'UCX_MEMTYPE_CACHE': 'n', - 'UCX_MAX_RNDV_RAILS': '1', -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/u/UCX/UCX-1.8.1.eb b/Overlays/juwelsbooster_overlay/u/UCX/UCX-1.8.1.eb deleted file mode 100644 index 740e9d2e3d5f84bb1f22280bac5c90f32d8f4e33..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/u/UCX/UCX-1.8.1.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.8.1' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -# configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support 
-configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support -configopts += '--without-cm ' # Disable IB CM - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/u/UCX/UCX-1.9.0.eb b/Overlays/juwelsbooster_overlay/u/UCX/UCX-1.9.0.eb deleted file mode 100644 index be5e7d345e87b84378bcbf8af348c6888b20c666..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/u/UCX/UCX-1.9.0.eb +++ /dev/null @@ -1,68 +0,0 @@ -easyblock = 'ConfigureMake' - -name = 'UCX' -version = '1.9.0' - -homepage = 'http://www.openucx.org' - -description = """Unified Communication X -An open-source production grade communication framework for data centric -and high-performance applications -""" - -site_contacts = 'Damian Alvarez <d.alvarez@fz-juelich.de>' - -toolchain = SYSTEM -toolchainopts = {'pic': True} - -source_urls = ['https://github.com/openucx/ucx/releases/download/v%(version)s'] -sources = ['%(namelower)s-%(version)s.tar.gz'] - -builddependencies = [ - ('binutils', '2.34'), - ('pkg-config', '0.29.2'), -] - -osdependencies = [ - # needed for --with-verbs - ('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), -] - -dependencies = [ - ('numactl', '2.0.13'), - ('CUDA', '11.0'), -] - -configopts = '--with-verbs ' # Build OpenFabrics support -configopts += '--without-java ' -configopts += '--disable-doxygen-doc ' - -configopts += '--enable-optimizations ' # Enable machine-specific optimizations, default: NO -# configopts += '--enable-tuning ' # Enable parameter tuning in run-time, default: NO -configopts += '--enable-mt ' # Enable thread support in UCP and UCT, default: NO -configopts += '--disable-debug ' -configopts += '--disable-logging ' -configopts += '--disable-assertions ' -configopts += '--disable-params-check ' -configopts += '--disable-dependency-tracking ' -configopts += '--with-cuda=$EBROOTCUDA ' - -configopts += '--enable-cma ' # Enable Cross Memory Attach - -configopts += '--with-rc ' # Compile with IB Reliable Connection support -configopts += '--with-ud ' # Compile with IB Unreliable Datagram support -configopts += '--with-dc ' # Compile with IB Dynamic Connection support -configopts += '--with-mlx5-dv ' # Compile with mlx5 Direct Verbs support -configopts += '--with-ib-hw-tm ' # Compile with IB Tag Matching support -configopts += '--with-dm ' # Compile with Device Memory support -configopts += '--without-cm ' # Disable IB CM - -configopts += '--with-avx ' # Compile with AVX -configopts += '--with-gdrcopy ' # Compile with GDRCopy - -sanity_check_paths = { - 'files': ['bin/ucx_info', 'bin/ucx_perftest', 'bin/ucx_read_profile'], - 'dirs': ['include', 'lib', 'share'] -} - -moduleclass = 'system' diff --git a/Overlays/juwelsbooster_overlay/v/Vampir/Vampir-9.9.0.eb b/Overlays/juwelsbooster_overlay/v/Vampir/Vampir-9.9.0.eb deleted file mode 100644 index 42676131160b2e95bb2f113cdc4eb5d25854c707..0000000000000000000000000000000000000000 --- a/Overlays/juwelsbooster_overlay/v/Vampir/Vampir-9.9.0.eb +++ /dev/null @@ -1,40 +0,0 @@ -# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild -# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany -# Authors:: Bernd Mohr <b.mohr@fz-juelich.de> -# License:: New BSD 
-#
-# This work is based on experiences from the UNITE project
-# http://apps.fz-juelich.de/unite/
-##
-easyblock = 'Binary'
-
-name = "Vampir"
-version = "9.9.0"
-local_archsuffix = "-linux-x86_64"
-
-homepage = 'http://www.vampir.eu'
-description = """The VAMPIR software tool provides an easy-to-use framework that enables
-developers to quickly display and analyze arbitrary program behavior at any level of detail.
-The tool suite implements optimized event analysis algorithms and customizable displays that
-enable fast and interactive rendering of very complex performance monitoring data.
-
-"""
-
-site_contacts = ['Michael Knobloch <m.knobloch@fz-juelich.de>', 'Software Analysis and Tools <swat@fz-juelich.de>']
-
-toolchain = SYSTEM
-
-sources = ['vampir-%s%s-setup.sh' % (version, local_archsuffix)]
-
-install_cmd = './vampir-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s'
-
-sanity_check_paths = {
-    'files': ["bin/vampir", "doc/vampir-manual.pdf"],
-    'dirs': []
-}
-
-modextravars = {
-    'VAMPIR_LICENSE': '/p/software/juwelsbooster/licenses/vampir/vampir.license',
-}
-
-moduleclass = 'tools'
diff --git a/Overlays/juwelsbooster_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb b/Overlays/juwelsbooster_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb
deleted file mode 100644
index 239a857fd6b511eda9c07f2907c3a5537f988b48..0000000000000000000000000000000000000000
--- a/Overlays/juwelsbooster_overlay/v/VampirServer/VampirServer-9.9.0-gpsmpi-2020.eb
+++ /dev/null
@@ -1,61 +0,0 @@
-# This is an easyconfig file for EasyBuild, see https://github.com/hpcugent/easybuild
-# Copyright:: Copyright 2013 Juelich Supercomputing Centre, Germany
-# Authors:: Bernd Mohr <b.mohr@fz-juelich.de>
-# License:: New BSD
-#
-# This work is based on experiences from the UNITE project
-# http://apps.fz-juelich.de/unite/
-##
-easyblock = 'Binary'
-
-name = "VampirServer"
-version = "9.9.0"
-
-homepage = 'http://www.vampir.eu'
-description = """The VAMPIR software tool provides an easy-to-use framework that enables
-developers to quickly display and analyze arbitrary program behavior at any level of detail.
-The tool suite implements optimized event analysis algorithms and customizable displays that
-enable fast and interactive rendering of very complex performance monitoring data.
-""" - -usage = """ -To start VampirServer -module load Vampir VampirServer -vampir & -BATCH_OPT="--account=<budget> --partition=<partition>" vampirserver start -n 4 mpi -(note server + port + server_id) -- Use it -Vampir GUI-> open other -> remote file -> server + port -- To stop VampirServer -vampirserver stop <server_id> -""" - -site_contacts = ['Michael Knobloch <m.knobloch@fz-juelich.de>', 'Software Analysis and Tools <swat@fz-juelich.de>'] - -toolchain = {'name': 'gpsmpi', 'version': '2020'} - -toolchainopts = {"usempi": True} - -sources = ['vampirserver-%s-linux-x86_64-setup.sh' % (version)] - -install_cmd = ('./vampirserver-%(version)s-linux-x86_64-setup.sh --silent --instdir=%(installdir)s ' - '&& %(installdir)s/bin/vampirserver config --silent') - -sanity_check_paths = { - 'files': ["bin/vampirserver", "doc/vampirserver-manual.pdf"], - 'dirs': [] -} - -# Remove Cray-specific 'ap' launcher, -# use SLURM launcher as MPI launcher and default -postinstallcmds = [ - 'rm %(installdir)s/etc/server/launcher/ap', - '''sed -i s/'BATCH_OPT=""'/'#BATCH_OPT=""'/g %(installdir)s/etc/server/launcher/custom/slurm''', - 'cp %(installdir)s/etc/server/launcher/custom/slurm %(installdir)s/etc/server/launcher/mpi', -] - -modextravars = { - 'VAMPIR_LICENSE': '/p/software/juwelsbooster/licenses/vampir/vampir.license', -} - -moduleclass = 'perf'